diff --git a/Dockerfile b/Dockerfile index 827347bd2..b877c4260 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM oraclelinux:7-slim +FROM oraclelinux:9-slim LABEL maintainer="Team at Oracle" LABEL description="OCI format to generate CD3 image" @@ -6,13 +6,16 @@ ARG USERNAME=cd3user ARG USER_UID=1001 ARG USER_GID=$USER_UID -RUN yum install sudo -y && groupadd --gid $USER_GID $USERNAME \ -&& useradd --uid $USER_UID --gid $USER_GID -d /$USERNAME -m $USERNAME \ -&& echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ -&& chmod 0440 /etc/sudoers.d/$USERNAME \ -&& mkdir -p /cd3user/tenancies && sudo chown -R $USERNAME:$USERNAME /cd3user/tenancies/ \ -&& yum install -y vim && echo 'alias vi="vim"' >> /etc/bashrc - +RUN microdnf install -y sudo && \ + groupadd --gid $USER_GID $USERNAME && \ + useradd --uid $USER_UID --gid $USER_GID -d /$USERNAME -m $USERNAME && \ + echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME && \ + chmod 0440 /etc/sudoers.d/$USERNAME && \ + mkdir -p /cd3user/tenancies && \ + chown -R $USERNAME:$USERNAME /cd3user/tenancies/ && \ + microdnf install -y vim && \ + microdnf install -y dnf && \ + echo 'alias vi="vim"' >> /etc/bashrc USER $USERNAME WORKDIR /cd3user/oci_tools/ @@ -20,18 +23,19 @@ COPY cd3_automation_toolkit cd3_automation_toolkit/ WORKDIR /cd3user/ -RUN sudo yum install -y oracle-softwarecollection-release-el7 \ -&& sudo chown -R $USERNAME:$USERNAME /cd3user/ - -RUN sudo sed -i -e 's/\r$//' /cd3user/oci_tools/cd3_automation_toolkit/shell_script.sh \ -&& bash /cd3user/oci_tools/cd3_automation_toolkit/shell_script.sh \ -&& sudo chown -R cd3user:cd3user /cd3user/ && sudo yum clean all && sudo rm -rf /var/cache/yum \ -&& sudo chmod -R 740 /cd3user/ +RUN sudo dnf install -y oraclelinux-release-el9 && \ +sudo chown -R $USERNAME:$USERNAME /cd3user/ && \ +sudo sed -i -e 's/\r$//' /cd3user/oci_tools/cd3_automation_toolkit/shell_script.sh && \ +bash /cd3user/oci_tools/cd3_automation_toolkit/shell_script.sh && \ +sudo chown -R cd3user:cd3user /cd3user/ && \ +sudo dnf clean all && \ +sudo rm -rf /var/cache/dnf && \ +sudo chmod -R 740 /cd3user/ ##################################### START INSTALLING JENKINS ################################### -ARG JENKINS_VERSION=2.401.1 -ARG JENKINS_SHA=600b73eabf797852e39919541b84f7686ff601b97c77b44eb00843eb91c7dd6c +ARG JENKINS_VERSION=2.444 +ARG JENKINS_SHA=ab093a455fc35951c9b46361002e17cc3ed7c59b0943bbee3a57a363f3370d2e ARG JENKINS_PLUGIN_MANAGER_VERSION=2.12.13 ARG PLUGIN_CLI_URL=https://github.com/jenkinsci/plugin-installation-manager-tool/releases/download/${JENKINS_PLUGIN_MANAGER_VERSION}/jenkins-plugin-manager-${JENKINS_PLUGIN_MANAGER_VERSION}.jar @@ -39,18 +43,16 @@ ARG JENKINS_HOME=/cd3user/tenancies/jenkins_home ARG JENKINS_INSTALL=/usr/share/jenkins ARG REF=/usr/share/jenkins/ref -RUN sudo yum remove java-1.8.0-openjdk-1.8.0.345.b01-1.el7_9.x86_64 \ -&& sudo yum install -y java-11-openjdk \ -&& sudo yum install -y java-11-openjdk-devel \ -&& sudo yum install unzip -y \ -&& sudo yum install git -y \ -&& sudo mkdir -p ${REF}/init.groovy.d \ -&& sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} \ -&& sudo curl -fsSL http://updates.jenkins-ci.org/download/war/${JENKINS_VERSION}/jenkins.war -o ${JENKINS_INSTALL}/jenkins.war \ -&& echo "${JENKINS_SHA} ${JENKINS_INSTALL}/jenkins.war" | sha256sum -c - \ -&& sudo curl -fsSL ${PLUGIN_CLI_URL} -o ${JENKINS_INSTALL}/jenkins-plugin-manager.jar +RUN sudo microdnf install -y java-21-openjdk && \ + sudo microdnf install -y java-21-openjdk-devel && \ + 
sudo microdnf install git-2.39.3 -y && \ + sudo mkdir -p ${REF}/init.groovy.d && \ + sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} && \ + sudo curl -fsSL http://updates.jenkins-ci.org/download/war/${JENKINS_VERSION}/jenkins.war -o ${JENKINS_INSTALL}/jenkins.war && \ + echo "${JENKINS_SHA} ${JENKINS_INSTALL}/jenkins.war" | sha256sum -c - && \ + sudo curl -fsSL ${PLUGIN_CLI_URL} -o ${JENKINS_INSTALL}/jenkins-plugin-manager.jar -ENV JAVA_HOME /usr/lib/jvm/java-11-openjdk-11.0.17.0.8-2.el8_6.x86_64 +ENV JAVA_HOME /usr/lib/jvm/java-21-openjdk ENV JENKINS_HOME ${JENKINS_HOME} ENV JENKINS_INSTALL ${JENKINS_INSTALL} ENV REF ${REF} @@ -65,7 +67,6 @@ COPY --chown=cd3user:cd3user jenkins_install ${JENKINS_INSTALL}/ COPY --chown=cd3user:cd3user jenkins_install/init/*.groovy ${REF}/init.groovy.d/ COPY --chown=cd3user:cd3user jenkins_install/plugins.txt ${REF}/plugins.txt - -RUN sudo java -jar ${JENKINS_INSTALL}/jenkins-plugin-manager.jar --war ${JENKINS_INSTALL}/jenkins.war --verbose -f ${REF}/plugins.txt \ -&& sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} \ -&& sudo chmod +x ${JENKINS_INSTALL}/jenkins.sh +RUN sudo java -jar ${JENKINS_INSTALL}/jenkins-plugin-manager.jar --war ${JENKINS_INSTALL}/jenkins.war --verbose -f ${REF}/plugins.txt && \ + sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} && \ + sudo chmod +x ${JENKINS_INSTALL}/jenkins.sh diff --git a/OCIWorkVMStack/data_sources.tf b/OCIWorkVMStack/data_sources.tf index 159f9ea9d..19f9c38b7 100644 --- a/OCIWorkVMStack/data_sources.tf +++ b/OCIWorkVMStack/data_sources.tf @@ -19,16 +19,17 @@ data "oci_identity_compartment" "compartment" { data "oci_core_images" "oracle_linux" { compartment_id = var.tenancy_ocid - operating_system = "Oracle Linux" - shape = var.instance_shape + #operating_system = "Oracle Linux" + #shape = var.instance_shape + display_name = var.instance_os_version sort_by = "TIMECREATED" sort_order = "DESC" state = "AVAILABLE" # filter restricts to OL - filter { - name = "operating_system_version" - values = ["${local.os_version}"] - regex = false - } + #filter { + # name = "operating_system_version" + # values = ["${local.os_version}"] + # regex = false + #} } diff --git a/OCIWorkVMStack/locals.tf b/OCIWorkVMStack/locals.tf index 4751182b5..3622850c1 100644 --- a/OCIWorkVMStack/locals.tf +++ b/OCIWorkVMStack/locals.tf @@ -19,7 +19,7 @@ locals { listing_resource_id = var.mp_listing_resource_id listing_resource_version = var.mp_listing_resource_version - os_version = var.instance_os_version == "Oracle-Linux-9" ? 9 : (var.instance_os_version == "Oracle-Linux-8" ? 8 : 7.9) + #os_version = var.instance_os_version == "Oracle-Linux-9" ? 9 : (var.instance_os_version == "Oracle-Linux-8" ? 
8 : 7.9) instance_image_ocid = data.oci_core_images.oracle_linux.images[0].id diff --git a/OCIWorkVMStack/schema.yaml b/OCIWorkVMStack/schema.yaml index 7bae304d2..3aaeceb8a 100644 --- a/OCIWorkVMStack/schema.yaml +++ b/OCIWorkVMStack/schema.yaml @@ -127,8 +127,10 @@ variables: description: Oracle Linux image OCID for VM provisioning type: enum enum: - - "Oracle-Linux-7" - default: "Oracle-Linux-7" + - "Oracle-Linux-7.9-2024.02.26-0" + - "Oracle-Linux-8.9-2024.02.26-0" + - "Oracle-Linux-9.3-2024.02.26-0" + default: "Oracle-Linux-9.3-2024.02.26-0" #pattern: '^ocid1\.([a-z0-9_-]{1,32})\.([a-z0-9_-]{1,15})\.([a-z0-9]{0,24})\.([a-z0-9]{60})$' required: true instance_shape: @@ -429,4 +431,4 @@ variables: mp_listing_resource_version: type: string tenancy_ocid: - type: string + type: string \ No newline at end of file diff --git a/OCIWorkVMStack/scripts/installToolkit.sh b/OCIWorkVMStack/scripts/installToolkit.sh index 0ebd4fb85..eb33b876b 100644 --- a/OCIWorkVMStack/scripts/installToolkit.sh +++ b/OCIWorkVMStack/scripts/installToolkit.sh @@ -67,7 +67,7 @@ echo "********************************************************" >> $logfile 2>&1 echo "########################################################" >> $logfile 2>&1 echo "Downloading CD3 Automation Toolkit Code from Github " >> $logfile 2>&1 echo "########################################################" >> $logfile 2>&1 -sudo git clone https://github.com/oracle-devrel/cd3-automation-toolkit.git -b develop $toolkit_dir >> $logfile 2>&1 +sudo git clone https://github.com/oracle-devrel/cd3-automation-toolkit.git -b develop $toolkit_dir >> $logfile 2>&1 stop_exec sudo ls -la /tmp/githubCode >> $logfile 2>&1 echo "Downloading CD3 Automation Toolkit Code from Github completed successfully" >> $logfile 2>&1 diff --git a/README.md b/README.md index 00679df88..b519a6110 100755 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@
- [What's New](https://github.com/oracle-devrel/cd3-automation-toolkit/releases/tag/v2024.2.1)   •   [Excel Templates](https://oracle-devrel.github.io/cd3-automation-toolkit/excel-templates)   •    [CD3 Docs](https://oracle-devrel.github.io/cd3-automation-toolkit/)   •   [Watch & Learn](https://www.youtube.com/playlist?list=PLPIzp-E1msrbJ3WawXVhzimQnLw5iafcp)   •  [Blogs & Tutorials](https://oracle-devrel.github.io/cd3-automation-toolkit/tutorials/)   •  [OCI CD3-Livelabs](https://apexapps.oracle.com/pls/apex/f?p=133:180:112501098061930::::wid:3724) + [What's New](https://github.com/oracle-devrel/cd3-automation-toolkit/releases/tag/v2024.2.2)   •   [Excel Templates](https://oracle-devrel.github.io/cd3-automation-toolkit/excel-templates)   •    [CD3 Docs](https://oracle-devrel.github.io/cd3-automation-toolkit/)   •   [Watch & Learn](https://www.youtube.com/playlist?list=PLPIzp-E1msrbJ3WawXVhzimQnLw5iafcp)   •  [Blogs & Tutorials](https://oracle-devrel.github.io/cd3-automation-toolkit/tutorials/)   •  [OCI CD3-Livelabs](https://apexapps.oracle.com/pls/apex/f?p=133:180:112501098061930::::wid:3724)
diff --git a/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py b/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py index 0c7eea34f..c2a16a46a 100644 --- a/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py +++ b/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py @@ -216,23 +216,23 @@ def create_terraform_tags(inputfile, outdir, service_dir, prefix, ct): if default_value != "" and str(default_value).lower() != "nan": if '$' in default_value and default_value.count('$') == 1: default_value = str(default_value).strip().replace('$','$$') - #is_required = 'false' #Uncomment this line if needed - columnvalue = key_tf_name+"="+default_compartment+"="+default_value#+"="+is_required #Uncomment this if needed + is_required = 'false' + columnvalue = key_tf_name+"="+default_compartment+"="+default_value+"="+is_required if columnvalue not in default_tags: default_tags.append(columnvalue) else: if default_value == '' or default_value.strip().lower() == 'nan': if str(df.loc[i,'Validator']).strip() != '' and str(df.loc[i,'Validator']).strip().lower() != 'nan' and str(df.loc[i,'Validator']).strip() != []: - #is_required_updated = 'true' #Uncomment this if needed + is_required_updated = 'true' default_value = values_list[0] - columnvalue = key_tf_name+"="+default_compartment+"="+default_value#+"="+is_required_updated #Uncomment this if needed + columnvalue = key_tf_name+"="+default_compartment+"="+default_value+"="+is_required_updated if columnvalue not in default_tags: default_tags.append(columnvalue) else: if str(df.loc[i, 'Validator']).strip() == '' or str(df.loc[i, 'Validator']).strip().lower() == 'nan': - #is_required_updated = 'true' #Uncomment this if needed + is_required_updated = 'true' default_value = '-' - columnvalue = key_tf_name+"="+default_compartment+"="+default_value#+"="+is_required_updated #Uncomment this if needed + columnvalue = key_tf_name+"="+default_compartment+"="+default_value+"="+is_required_updated if columnvalue not in default_tags: default_tags.append(columnvalue) diff --git a/cd3_automation_toolkit/Governance/Tagging/export_tags_nonGreenField.py b/cd3_automation_toolkit/Governance/Tagging/export_tags_nonGreenField.py index 8c79a4439..9d4a9e500 100644 --- a/cd3_automation_toolkit/Governance/Tagging/export_tags_nonGreenField.py +++ b/cd3_automation_toolkit/Governance/Tagging/export_tags_nonGreenField.py @@ -149,7 +149,10 @@ def export_tags_nongreenfield(inputfile, outdir, service_dir, config, signer, ct if tag_defaults.data != []: for tag_default in tag_defaults.data: if tag_default.tag_definition_name != '(deleted tag definition)': - add_values_in_dict(tag_default_comps_map, tag_default.tag_definition_id+"="+tag_default.tag_definition_name, [ntk_compartment_name+"="+tag_default.value]) + my_val = tag_default.value + if tag_default.is_required: + my_val = "" + add_values_in_dict(tag_default_comps_map, tag_default.tag_definition_id+"="+tag_default.tag_definition_name, [ntk_compartment_name+"="+my_val]) defaultcomp_to_tagid_map.update({ commonTools.check_tf_variable(str(tag_default.tag_definition_name).replace('\\','\\\\'))+"-"+commonTools.check_tf_variable(ntk_compartment_name) : tag_default.id }) comp_ocid_done = [] diff --git a/cd3_automation_toolkit/Governance/Tagging/templates/tags-defaults-template
b/cd3_automation_toolkit/Governance/Tagging/templates/tags-defaults-template index 6fe837dab..524068db5 100644 --- a/cd3_automation_toolkit/Governance/Tagging/templates/tags-defaults-template +++ b/cd3_automation_toolkit/Governance/Tagging/templates/tags-defaults-template @@ -22,7 +22,7 @@ tag_defaults = { compartment_id = "{{ tags.split('=')[1] }}" value = "{{ tags.split('=')[2] }}" - {# is_required = {{ tags.split('=')[3] }} #} {# Uncomment this line if needed #} + is_required = {{ tags.split('=')[3] }} }, {% endfor %} diff --git a/cd3_automation_toolkit/Identity/Compartments/create_terraform_compartments.py b/cd3_automation_toolkit/Identity/Compartments/create_terraform_compartments.py index dc9fd35e2..1a89d0cab 100644 --- a/cd3_automation_toolkit/Identity/Compartments/create_terraform_compartments.py +++ b/cd3_automation_toolkit/Identity/Compartments/create_terraform_compartments.py @@ -243,7 +243,7 @@ def travel(parent, keys, values, c): oname[reg].close() print(outfile[reg] + " for Compartments has been created for region " + reg) - fetch_comp_file = f'{outdir}/fetchcompinfo.safe' + fetch_comp_file = f'{outdir}/.safe/fetchcompinfo.safe' with open(fetch_comp_file, 'w') as f: f.write('run_fetch_script=1') f.close() diff --git a/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py b/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py index e409ed891..f8ee99793 100644 --- a/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py +++ b/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py @@ -118,15 +118,16 @@ def print_events(values_for_column_events, region, ntk_compartment_name, event, data = str(condition["data"]) else: data = "{}" - for val in condition["eventType"]: - if "oraclecloud" in val: - service = val.split("com.oraclecloud.")[1] - elif "oracle" in val: - service = val.split("com.oracle.")[1] - event_prod = service.split('.', 1)[0] - event_res = service.split('.', 1)[1] - if ( action_name != "" ): - events_rows(values_for_column_events, region, ntk_compartment_name, event_name, event_desc, action_type, action_is_enabled, action_description, event_prod, event_res,data, event_is_enabled, action_name, event, event_info) + if "eventType" in condition: + for val in condition["eventType"]: + if "oraclecloud" in val: + service = val.split("com.oraclecloud.")[1] + elif "oracle" in val: + service = val.split("com.oracle.")[1] + event_prod = service.split('.', 1)[0] + event_res = service.split('.', 1)[1] + if ( action_name != "" ): + events_rows(values_for_column_events, region, ntk_compartment_name, event_name, event_desc, action_type, action_is_enabled, action_description, event_prod, event_res,data, event_is_enabled, action_name, event, event_info) if ( i > 0 and action_name != ""): events_rows(values_for_column_events, region, ntk_compartment_name, event_name, event_desc, action_type, action_is_enabled, action_description, event_prod, event_res,data, event_is_enabled, action_name, event, event_info) i = i + 1 diff --git a/cd3_automation_toolkit/ManagementServices/Monitoring/create_terraform_alarms.py b/cd3_automation_toolkit/ManagementServices/Monitoring/create_terraform_alarms.py index 66496d6dc..aa1ce0cbf 100644 --- a/cd3_automation_toolkit/ManagementServices/Monitoring/create_terraform_alarms.py +++ 
b/cd3_automation_toolkit/ManagementServices/Monitoring/create_terraform_alarms.py @@ -161,7 +161,7 @@ def create_terraform_alarms(inputfile, outdir, service_dir, prefix, ct): # Write all info to TF string - tfStr[region]=tfStr[region][:-1] +alarms_template.render(tempStr) + tfStr[region]=tfStr[region][:-2] +alarms_template.render(tempStr) # Write to output for reg in ct.all_regions: diff --git a/cd3_automation_toolkit/ManagementServices/Monitoring/templates/alarms-template b/cd3_automation_toolkit/ManagementServices/Monitoring/templates/alarms-template index 6070aa289..3e0823d91 100644 --- a/cd3_automation_toolkit/ManagementServices/Monitoring/templates/alarms-template +++ b/cd3_automation_toolkit/ManagementServices/Monitoring/templates/alarms-template @@ -22,7 +22,13 @@ alarms = { query = "{{ query }}" severity = "{{ severity }}" {% if body and body != "" %} + {% if '\n' not in body %} body = "{{ body }}" + {% else %} + body = <<-EOF + {{ body }} + EOF + {% endif %} {% endif %} {% if message_format and message_format != "" %} diff --git a/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py b/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py index 247051616..f79a04dc9 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py @@ -36,10 +36,11 @@ def create_all_tf_objects(inputfile, outdir, service_dir,prefix, ct, non_gf_tena with section('Process DRGs tab for DRG Route Tables and Route Distribution creation'): create_terraform_drg_route(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy, network_connectivity_in_setupoci, modify_network) + #Create Workflow if non_gf_tenancy == False: with section('Process Subnets tab for Routes creation'): create_terraform_route(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy, network_vlan_in_setupoci, modify_network) - + # Create Workflow if non_gf_tenancy == False: with section('Process Subnets for Seclists creation'): create_terraform_seclist(inputfile, outdir, service_dir_network, prefix, ct, modify_network) diff --git a/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py b/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py index bc53e929f..ff633e3dc 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py @@ -300,8 +300,8 @@ def get_rpc_resources(source_region, SOURCE_RPC_LIST, dest_rpc_dict, rpc_source_ dest_drg_rt_name = "" drg_rt_import_dist_name = "" dest_drg_rt_import_dist_name = "" - source_rpc_comp_name = "" - dest_rpc_comp_name = "" + source_drg_comp_name = "" + dest_drg_comp_name = "" dest_rpc_display_name = "" dest_import_rt_statements = None import_rt_statements = None @@ -330,12 +330,13 @@ def get_comp_details(comp_data): # Check peering is alive if source_rpc_peer_id is not None and new_rpc.peering_status == "PEERED": - source_rpc_comp_name = get_comp_details(new_rpc.compartment_id) source_rpc_display_name = new_rpc.display_name source_rpc_drg_id = new_rpc.drg_id dest_rpc_id = new_rpc.peer_id dest_region = new_rpc.peer_region_name.split("-")[1] source_rpc_drg_name = getattr(rpc_source_client.get_drg(drg_id=source_rpc_drg_id).data, 'display_name') + source_drg_comp_name = get_comp_details( + getattr(rpc_source_client.get_drg(drg_id=source_rpc_drg_id).data, 'compartment_id')) rpc_tf_name = 
commonTools.check_tf_variable(new_rpc.display_name) # Fetch source attach list id @@ -377,7 +378,7 @@ def get_comp_details(comp_data): remote_peering_connection_id=source_rpc_peer_id) dest_rpc_drg_id = dest_rpc.drg_id dest_rpc_drg_name = getattr(client.get_drg(drg_id=dest_rpc_drg_id).data, 'display_name') - dest_rpc_comp_name = get_comp_details(dest_rpc.compartment_id) + dest_drg_comp_name = get_comp_details(getattr(client.get_drg(drg_id=dest_rpc_drg_id).data, 'compartment_id')) dest_rpc_display_name = dest_rpc.display_name dest_drg_rpc_attachment_list = client.list_drg_attachments( compartment_id=dest_rpc_comp_id, attachment_type="REMOTE_PEERING_CONNECTION", @@ -407,16 +408,16 @@ def get_comp_details(comp_data): importCommands_rpc["global"].write( "\nterraform import \"module.rpcs[\\\"" + rpc_tf_name + f"\\\"].oci_core_remote_peering_connection.{source_region.lower()}_{region.lower()}_accepter_rpc[\\\"region\\\"]\" " + str( dest_rpc_id)) - + importCommands_rpc["global"].write("\nterraform plan") for col_header in values_for_column: if col_header == 'Region': values_for_column[col_header].append(source_region) elif col_header == 'Attached To': - # Format is RPC::region::dest_rpc_comp_name::dest_rpc_drg_name + # Format is RPC::region::dest_rpc_drg_name attach_to = "RPC::" + dest_region.lower() + "::" + dest_rpc_drg_name values_for_column[col_header].append(attach_to) elif col_header == 'Compartment Name': - values_for_column[col_header].append(source_rpc_comp_name) + values_for_column[col_header].append(source_drg_comp_name) elif col_header == 'RPC Display Name': values_for_column[col_header].append(source_rpc_display_name) elif col_header == 'DRG Name': @@ -461,11 +462,11 @@ def get_comp_details(comp_data): if col_header == 'Region': values_for_column[col_header].append(dest_region.capitalize()) elif col_header == 'Attached To': - # Format is RPC::region::dest_rpc_comp_name::dest_rpc_drg_name + # Format is RPC::region::source_rpc_drg_name attach_to = "RPC::" + source_region.lower() + "::" + source_rpc_drg_name values_for_column[col_header].append(attach_to) elif col_header == 'Compartment Name': - values_for_column[col_header].append(dest_rpc_comp_name) + values_for_column[col_header].append(dest_drg_comp_name) elif col_header == 'RPC Display Name': values_for_column[col_header].append(dest_rpc_display_name) elif col_header == 'DRG Name': @@ -1118,4 +1119,4 @@ def export_networking(inputfile, outdir, service_dir, config, signer, ct, export export_drg_routetable(inputfile, outdir, service_dir_network, config1=config, signer1=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions, _tf_import_cmd=True) # Fetch NSGs - export_nsg(inputfile, outdir, service_dir_nsg, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions, _tf_import_cmd=True) + export_nsg(inputfile, outdir, service_dir_nsg, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions, _tf_import_cmd=True) \ No newline at end of file diff --git a/cd3_automation_toolkit/Network/Global/templates/rpc-root-terraform-template b/cd3_automation_toolkit/Network/Global/templates/rpc-root-terraform-template index c83516c58..0f45d6ff8 100644 --- a/cd3_automation_toolkit/Network/Global/templates/rpc-root-terraform-template +++ b/cd3_automation_toolkit/Network/Global/templates/rpc-root-terraform-template @@ -8,8 +8,8 @@ module "rpcs" { source = "../modules/rpc" for_each = var.drg_other_attachments - 
requester_compartment_id = each.value.requester_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.requester_compartment_id)) > 0 ? each.value.requester_compartment_id : var.compartment_ocids[each.value.requester_compartment_id]) : null - accepter_compartment_id = each.value.accepter_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.accepter_compartment_id)) > 0 ? each.value.accepter_compartment_id : var.compartment_ocids[each.value.accepter_compartment_id]) : null + requester_compartment_id = each.value.requester_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.requester_compartment_id)) > 0 ? each.value.requester_compartment_id : var.compartment_ocids[each.value.requester_compartment_id]) : null + accepter_compartment_id = each.value.accepter_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.accepter_compartment_id)) > 0 ? each.value.accepter_compartment_id : var.compartment_ocids[each.value.accepter_compartment_id]) : null display_name = each.value.display_name #Requester diff --git a/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py b/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py index 0bb8bbf5a..b08342380 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py @@ -128,7 +128,7 @@ def print_nlb_backendset_backendserver(region, ct, values_for_column_bss,NLBs, n elif col_header == "Backend Set Name": values_for_column_bss[col_header].append(backendsets) else: - oci_objs = [eachnlb,backendset_details] + oci_objs = [eachnlb,backendset_details,hc] values_for_column_bss = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict_bss,values_for_column_bss) return values_for_column_bss @@ -145,7 +145,6 @@ def print_nlb_listener(region, outdir, values_for_column_lis, NLBs, nlb_compartm if 'ocid1.cluster' in created_by: continue - importCommands[reg] = open(outdir + "/" + reg + "/tf_import_commands_nlb_nonGF.sh", "a") nlb_display_name = eachnlb.display_name tf_name = commonTools.check_tf_variable(nlb_display_name) importCommands[reg].write("\nterraform import \"module.network-load-balancers[\\\"" + str(tf_name) + "\\\"].oci_network_load_balancer_network_load_balancer.network_load_balancer\" " + eachnlb.id) diff --git a/cd3_automation_toolkit/Network/LoadBalancers/templates/backend-set-template b/cd3_automation_toolkit/Network/LoadBalancers/templates/backend-set-template index 0e384e7e9..77ceda4e0 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/templates/backend-set-template +++ b/cd3_automation_toolkit/Network/LoadBalancers/templates/backend-set-template @@ -29,12 +29,16 @@ backend_sets = { interval_ms = "{{ interval_in_millis }}" {% endif %} + {% if response_body_regex and response_body_regex != '' %} + response_body_regex = "{{ response_body_regex }}" + {% endif %} + {% if port != '' %} port = "{{ backend_healthcheck_port }}" {% endif %} - {% if response_body_regex and response_body_regex != '' %} - response_body_regex = "{{ response_body_regex }}" + {% if is_force_plain_text and is_force_plain_text != '' %} + is_force_plain_text = {{ is_force_plain_text }} {% endif %} {% if retries and retries != '' %} diff --git a/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-backend-set-template b/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-backend-set-template index 470d36c6c..7f65b79f4 100644 
--- a/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-backend-set-template +++ b/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-backend-set-template @@ -54,14 +54,11 @@ nlb_backend_sets = { {% if request_data and request_data != '' %} request_data = "{{ request_data }}" - {% else %} - request_data = null {% endif %} + {% if response_data and response_data != '' %} response_data = "{{ response_data }}" - {% else %} - response_data = null {% endif %} {% if timeout_in_millis and timeout_in_millis != '' %} diff --git a/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-template b/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-template index 3f540c62a..29c79a245 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-template +++ b/cd3_automation_toolkit/Network/LoadBalancers/templates/nlb-template @@ -44,6 +44,10 @@ network_load_balancers = { is_preserve_source_destination = {{ is_preserve_source_destination }} {% endif %} + {% if is_symmetric_hash_enabled and is_symmetric_hash_enabled != "" %} + is_symmetric_hash_enabled = {{ is_symmetric_hash_enabled }} + {% endif %} + {% if nlb_ip_version and nlb_ip_version != "" %} nlb_ip_version = {{ nlb_ip_version }} {% endif %} diff --git a/cd3_automation_toolkit/OCI_Regions b/cd3_automation_toolkit/OCI_Regions index 4e245dce9..5e23efe50 100644 --- a/cd3_automation_toolkit/OCI_Regions +++ b/cd3_automation_toolkit/OCI_Regions @@ -1,5 +1,4 @@ #Region:Region_Key -saltlake:us-saltlake-2 amsterdam:eu-amsterdam-1 stockholm:eu-stockholm-1 abudhabi:me-abudhabi-1 diff --git a/cd3_automation_toolkit/Release-Notes b/cd3_automation_toolkit/Release-Notes index 58be28aab..db36292da 100644 --- a/cd3_automation_toolkit/Release-Notes +++ b/cd3_automation_toolkit/Release-Notes @@ -1,3 +1,21 @@ +------------------------------------- +CD3 Automation Toolkit Tag v2024.2.2 +Apr 30, 2024 +------------------------------------- +1. Download the Excel sheet after the create and export workflows when using Jenkins. +2. Docker image upgrade to OL9, along with upgrades of the other bundled software. +3. Dropdowns for Region and Compartment while running setUpOCI using Jenkins. +4. Fix image OCIDs for OCI Work VM deployment using the RM stack. +5. Option to execute 3rd-party services from CD3 - ShowOCI along with the CIS Compliance Check script. +6. Upgrade of existing Terraform modules - identity, buckets, LBaaS, NLBs (without DNS health check as of now). +7. Enhance CD3 Validator for OCI Buckets. + +------------------------------------- +CD3 Automation Toolkit Tag v2024.2.1 +Apr 4, 2024 +------------------------------------- +1. Quick bug fix for the OCI RM stack, made directly in GitHub.
+ ------------------------------------- CD3 Automation Toolkit Tag v2024.2.0 Mar 22, 2024 diff --git a/cd3_automation_toolkit/Security/Firewall/templates/policy-template b/cd3_automation_toolkit/Security/Firewall/templates/policy-template deleted file mode 100644 index 3a101264a..000000000 --- a/cd3_automation_toolkit/Security/Firewall/templates/policy-template +++ /dev/null @@ -1,45 +0,0 @@ -{% if count == 0 %} - -fw-policies = { - ##Add New firewall policy for {{ region|lower }} here## -} -{% else %} - {% if policy_name != "" and policy_name != "nan" and policy_name != null %} - {{ policy_tf_name }} = { - compartment_id = "{{ compartment_tf_name }}" - display_name = "{{ policy_name }}" - - - {# ##Do not modify below this line## #} - {# #} - {# ###Section for adding Defined and Freeform Tags### #} - {% if defined_tags and defined_tags != 'nan' and defined_tags != '' and defined_tags != [['nan']] %} - {% if defined_tags[0] %} - defined_tags = { - {% for tags in defined_tags %} - {% if not loop.last %} - "{{ tags[0] }}"= "{{ tags[1] }}" , - {% else %} - "{{ tags[0] }}"= "{{ tags[1] }}" - {% endif %} - {% endfor %} - } - {% endif %} - {% endif %} - {% if freeform_tags and freeform_tags != 'nan' and freeform_tags != '' and freeform_tags != [['nan']] %} - {% if freeform_tags[0] %} - freeform_tags = { - {% for tags in freeform_tags %} - {% if not loop.last %} - "{{ tags[0] }}"="{{ tags[1] }}", - {% else %} - "{{ tags[0] }}"="{{ tags[1] }}" - {% endif %} - {% endfor %} - } - {% endif %} - {% endif %} - {# ###Section for adding Defined and Freeform Tags ends here### #} - }, - {% endif %} -{% endif %} diff --git a/cd3_automation_toolkit/Storage/BlockVolume/templates/blockvolumes-template b/cd3_automation_toolkit/Storage/BlockVolume/templates/blockvolumes-template index a255bb0d9..152cd6805 100644 --- a/cd3_automation_toolkit/Storage/BlockVolume/templates/blockvolumes-template +++ b/cd3_automation_toolkit/Storage/BlockVolume/templates/blockvolumes-template @@ -71,7 +71,7 @@ blockvolumes = { {% endif %} {% if is_pv_encryption_in_transit_enabled %} - is_pv_encryption_in_transit_enabled = "{{ is_pv_encryption_in_transit_enabled }}" + is_pv_encryption_in_transit_enabled = {{ is_pv_encryption_in_transit_enabled }} {% endif %} {% if is_shareable %} diff --git a/cd3_automation_toolkit/cd3Validator.py b/cd3_automation_toolkit/cd3Validator.py index 5c00369a5..ec7231603 100644 --- a/cd3_automation_toolkit/cd3Validator.py +++ b/cd3_automation_toolkit/cd3Validator.py @@ -307,7 +307,7 @@ def validate_subnets(filename, comp_ids, vcnobj): cidr_list.append(entry) # Check for null values and display appropriate message - labels = ['DNS Label', 'DHCP Option Name', 'Route Table Name', 'Seclist Names'] + labels = ['DNS Label', 'DHCP Option Name', 'Route Table Name', 'Seclist Names','NSGs'] for j in dfsub.keys(): if (str(dfsub[j][i]).strip() == "NaN" or str(dfsub[j][i]).strip() == "nan" or str(dfsub[j][i]).strip() == ""): # only dhcp_option_name, route table name, seclist_names and dns_label columns can be empty @@ -1386,17 +1386,21 @@ def validate_buckets(filename, comp_ids): if columnvalue.lower() not in ['standard','archive']: log(f'ROW {i + 3} : Value of "Storage Tier" can be only either "Standard" or "Archive".') buckets_invalid_check = True - + elif columnvalue.lower() == 'archive': + auto_tiering_index = dfcolumns.index('Auto Tiering') + if auto_tiering_index != -1 and str(dfbuckets.loc[i, 'Auto Tiering']).strip().lower() == 'enabled': + log(f'ROW {i + 3} : Auto Tiering cannot be "Enabled" when Storage Tier is 
"Archive".') + buckets_invalid_check = True if columnname == 'Auto Tiering': if columnvalue.lower() not in ['enabled','disabled']: log(f'ROW {i + 3} : Value of "Auto Tiering" can be only either "Enabled" or "Disabled".') buckets_invalid_check = True - + # Check for the Object Versioning column if columnname == 'Object Versioning': - if columnvalue.lower() not in ['enabled','disabled']: - log(f'ROW {i + 3} : Value of "Object Versioning" can be only either "Enabled" or "Disabled".') + if columnvalue.lower() not in ['enabled', 'disabled']: + log(f'ROW {i + 3} : Value of "Object Versioning" can only be "Enabled" or "Disabled".') buckets_invalid_check = True if columnname == 'Emit Object Events': @@ -1410,70 +1414,100 @@ def validate_buckets(filename, comp_ids): log(f'ROW {i + 3} : Value of "Visibility" can be only either "Private" or "Public".') buckets_invalid_check = True - #Check for valid destination region for enabling the replication policy - if columnname == 'Replication Policy': - columnvalue= columnvalue.split("::") - if len(columnvalue) == 3 and all(columnvalue): - replication_policy_name = columnvalue[0] - destination_region = columnvalue[1].lower() - if destination_region in ct.region_dict: - destination_region = ct.region_dict[destination_region] - else: - log(f'ROW {i + 3} : The "Destination_region" of replication policy is not a valid region.') + # Check for valid destination region for enabling the replication policy + if columnname == 'Replication Policy' and columnvalue != "nan": + columnvalue = columnvalue.split("::") + if len(columnvalue) == 3: + replication_policy_name = columnvalue[0] + destination_region = columnvalue[1].lower() + destination_bucket_name = columnvalue[2] + if replication_policy_name.strip() and destination_bucket_name.strip(): + if destination_region in ct.region_dict: + destination_region = ct.region_dict[destination_region] + else: + log(f'ROW {i + 3} : The "Destination_region" of replication policy is not a valid region.') + buckets_invalid_check = True + else: + log(f'ROW {i + 3} : The replication policy format is incorrect or policy name/destination bucket is empty.') + buckets_invalid_check = True + else: + log(f'ROW {i + 3} : The replication policy format is incorrect.') buckets_invalid_check = True + # Get the current time + current_time = datetime.datetime.utcnow() #Check for the retention policy details if columnname == 'Retention Rules': rule_values = columnvalue.split("\n") - retention_rules = [] - for rule in rule_values: - rule_components = rule.split("::") - if len(rule_components) >= 1: - retention_rule_display_name = rule_components[0] - time_unit = None - time_amount = None - time_rule_locked = None - - if len(rule_components) >= 2: - if rule_components[1].lower() == 'indefinite': - time_amount = None - else: - time_amount = rule_components[1] - if not time_amount.isdigit(): - log(f'ROW {i + 3} : "time_amount" of retention rule is not in valid format. 
It should be an "integer" or "indefinite".') - buckets_invalid_check = True - continue + if rule_values and str(dfbuckets.loc[i, 'Object Versioning']).strip().lower() == 'enabled': + log(f'ROW {i + 3} : Retention policy cannot be created when Object Versioning is enabled.') + buckets_invalid_check = True + + elif rule_values and str(dfbuckets.loc[i, 'Object Versioning']).strip().lower() == 'disabled': + retention_rules = [] + for rule in rule_values: + rule_components = rule.split("::") + if len(rule_components) >= 1: + retention_rule_display_name = rule_components[0] + time_unit = None + time_amount = None + time_rule_locked = None + + if len(rule_components) >= 2: + if rule_components[1].lower() == 'indefinite': + time_amount = None else: - time_amount = int(time_amount) + time_amount = rule_components[1] + if not time_amount.isdigit(): + log(f'ROW {i + 3} : "time_amount" of retention rule is not in valid format. It should be an "integer" or "indefinite".') + buckets_invalid_check = True + continue + else: + time_amount = int(time_amount) - if len(rule_components) >= 3: - time_unit = rule_components[2].upper() - if time_unit not in ('DAYS', 'YEARS'): - log(f'ROW {i + 3} : "time_unit" of retention rule is not in valid format. It should be either DAYS or YEARS.') - buckets_invalid_check = True - else: - # If time_unit is valid, set the flag to True for processing time_rule_locked - process_time_rule_locked = True - - if len(rule_components) == 4 and process_time_rule_locked: - time_rule_locked = rule_components[3] - if time_rule_locked.endswith(".000Z"): - time_rule_locked = time_rule_locked[:-5] + "Z" - elif not re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z|\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",time_rule_locked): - # Convert from "dd-mm-yyyy" to "YYYY-MM-DDThh:mm:ssZ" format - if re.match(r"\d{2}-\d{2}-\d{4}", time_rule_locked): - try: - datetime_obj = datetime.datetime.strptime(time_rule_locked, "%d-%m-%Y") - time_rule_locked = datetime_obj.strftime("%Y-%m-%dT%H:%M:%SZ") - except ValueError: + if len(rule_components) >= 3: + time_unit = rule_components[2].upper() + if time_unit not in ('DAYS', 'YEARS'): + log(f'ROW {i + 3} : "time_unit" of retention rule is not in valid format. It should be either DAYS or YEARS.') + buckets_invalid_check = True + else: + # If time_unit is valid, set the flag to True for processing time_rule_locked + process_time_rule_locked = True + + if len(rule_components) == 4 and process_time_rule_locked: + time_rule_locked = rule_components[3] + if time_rule_locked.endswith(".000Z"): + time_rule_locked = time_rule_locked[:-5] + "Z" + elif not re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z|\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",time_rule_locked): + # Convert from "dd-mm-yyyy" to "YYYY-MM-DDThh:mm:ssZ" format + if re.match(r"\d{2}-\d{2}-\d{4}", time_rule_locked): + try: + datetime_obj = datetime.datetime.strptime(time_rule_locked, "%d-%m-%Y") + time_rule_locked = datetime_obj.strftime("%Y-%m-%dT%H:%M:%SZ") + except ValueError: + log(f'ROW {i + 3} : "time_rule_locked" of retention rule is not in valid format. It should be in the format "dd-mm-yyyy".') + buckets_invalid_check = True + continue + else: log(f'ROW {i + 3} : "time_rule_locked" of retention rule is not in valid format. It should be in the format "dd-mm-yyyy".') buckets_invalid_check = True continue - else: - log(f'ROW {i + 3} : "time_rule_locked" of retention rule is not in valid format. 
It should be in the format "dd-mm-yyyy".') + # Parse the time_rule_locked into a datetime object + try: + time_rule_locked_datetime = datetime.datetime.strptime(time_rule_locked, "%Y-%m-%dT%H:%M:%SZ") + except ValueError: + log(f'ROW {i + 3} : "time_rule_locked" of retention rule is not in valid format. It should be in the format "YYYY-MM-DDThh:mm:ssZ".') buckets_invalid_check = True continue + # Calculate the difference between current time and time_rule_locked + time_difference = time_rule_locked_datetime - current_time + + # Check if the difference is less than 14 days + if time_difference.days < 14: + log(f'ROW {i + 3} : "time_rule_locked" of retention rule must be more than 14 days from the current time.') + buckets_invalid_check = True + # Check for the Lifecycle Policy Details if lifecycle_input == True: # Define the valid options for the "Lifecycle Target and Action" column @@ -1486,7 +1520,6 @@ def validate_buckets(filename, comp_ids): 'multipart-uploads::Abort' ] - # Check if "Lifecycle Target and Action" is empty if columnname == 'Lifecycle Target and Action': if columnvalue != 'nan' and columnvalue not in valid_options: @@ -1518,6 +1551,7 @@ def validate_buckets(filename, comp_ids): if time_unit not in ['days','years']: log(f'ROW {i + 3} : Invalid time amount. "Lifecycle Rule Period" must be "DAYS" or "YEARS".') buckets_invalid_check = True + else: log(f'ROW {i + 3} : Invalid format for "Lifecycle Rule Period" ') buckets_invalid_check = True diff --git a/cd3_automation_toolkit/cis_reports.py b/cd3_automation_toolkit/cis_reports.py index c4ae5ac91..8b0c1dda1 100644 --- a/cd3_automation_toolkit/cis_reports.py +++ b/cd3_automation_toolkit/cis_reports.py @@ -1,5 +1,5 @@ ########################################################################## -# Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. 
# # cis_reports.py @@ -35,9 +35,9 @@ except Exception: OUTPUT_TO_XLSX = False -RELEASE_VERSION = "2.8.0" -PYTHON_SDK_VERSION = "2.120.0" -UPDATED_DATE = "February 23, 2024" +RELEASE_VERSION = "2.8.1" +PYTHON_SDK_VERSION = "2.124.1" +UPDATED_DATE = "March 25, 2024" ########################################################################## @@ -138,7 +138,7 @@ class CIS_Report: str_kms_key_time_max_datetime = kms_key_time_max_datetime.strftime(__iso_time_format) kms_key_time_max_datetime = datetime.datetime.strptime(str_kms_key_time_max_datetime, __iso_time_format) - def __init__(self, config, signer, proxy, output_bucket, report_directory, print_to_screen, regions_to_run_in, raw_data, obp, redact_output, debug=False, all_resources=True): + def __init__(self, config, signer, proxy, output_bucket, report_directory, report_prefix, report_summary_json, print_to_screen, regions_to_run_in, raw_data, obp, redact_output, debug=False, all_resources=True): # CIS Foundation benchmark 2.0.0 self.cis_foundations_benchmark_2_0 = { @@ -169,7 +169,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, print '3.1': {'section': 'Compute', 'recommendation_#': '3.1', 'Title': 'Ensure Compute Instance Legacy Metadata service endpoint is disabled.', 'Status': True, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['4.6'], 'CCCS Guard Rail': '', 'Remediation': []}, '3.2': {'section': 'Compute', 'recommendation_#': '3.2', 'Title': 'Ensure Secure Boot is enabled on Compute Instance.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['4.1'], 'CCCS Guard Rail': '', 'Remediation': []}, - '3.3': {'section': 'Compute', 'recommendation_#': '3.2', 'Title': 'Ensure Compute Instance Legacy MetaData service endpoint is disabled.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': [''], 'CCCS Guard Rail': '', 'Remediation': []}, + '3.3': {'section': 'Compute', 'recommendation_#': '3.3', 'Title': 'Ensure In-transit Encryption is enabled on Compute Instance.', 'Status': True, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': [''], 'CCCS Guard Rail': '', 'Remediation': []}, '4.1': {'section': 'Logging and Monitoring', 'recommendation_#': '4.1', 'Title': 'Ensure default tags are used on resources.', 'Status': False, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['1.1'], 'CCCS Guard Rail': '', 'Remediation': []}, '4.2': {'section': 'Logging and Monitoring', 'recommendation_#': '4.2', 'Title': 'Create at least one notification topic and subscription to receive monitoring alerts.', 'Status': False, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['8.2', '8.11'], 'CCCS Guard Rail': '11', 'Remediation': []}, @@ -196,7 +196,6 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, print '5.2.2': {'section': 'Storage - Block Volumes', 'recommendation_#': '5.2.2', 'Title': 'Ensure Boot Volumes are encrypted with Customer Managed Key.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['3.11'], 'CCCS Guard Rail': ''}, '5.3.1': {'section': 'Storage - File Storage Service', 'recommendation_#': '5.3.1', 'Title': 'Ensure File Storage Systems are encrypted with Customer Managed Keys.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['3.11'], 'CCCS Guard Rail': '', 'Remediation': []}, - '6.1': {'section': 'Asset Management', 'recommendation_#': '6.1', 'Title': 'Create at least one compartment in your tenancy to store cloud resources.', 'Status': True, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['3.1'], 'CCCS Guard 
Rail': '2,3,8,12', 'Remediation': []}, '6.2': {'section': 'Asset Management', 'recommendation_#': '6.2', 'Title': 'Ensure no resources are created in the root compartment.', 'Status': True, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['3.12'], 'CCCS Guard Rail': '1,2,3', 'Remediation': []} } @@ -942,10 +941,10 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, print self.__raw_regions.append(record) # By Default it is today's date - if report_directory: - self.__report_directory = report_directory + "/" - else: - self.__report_directory = self.__tenancy.name + "-" + self.report_datetime + self.__report_directory = f'{report_directory}/' if report_directory else f'{self.__tenancy.name}-{self.report_datetime}' + + self.__report_prefix = f'{report_prefix}_' if report_prefix else '' + self.__report_summary_json = report_summary_json # Checking if a Tenancy has Identity Domains enabled try: @@ -956,7 +955,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, print except Exception as e: # To be safe if it fails I'll check self.__identity_domains_enabled = True - debug("__init__: Exception checking identity domains status \n" + str(e)) + debug("__init__: Exception checking identity domains status\n" + str(e)) self.__errors.append({"id" : "__init__", "error" : str(e)}) @@ -998,7 +997,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, print def __create_regional_signers(self, proxy): print("Creating regional signers and configs...") for region_key, region_values in self.__regions.items(): - debug("processing __create_regional_signers ") + debug("processing __create_regional_signers") # Creating regional configs and signers region_signer = self.__signer region_signer.region_name = region_key @@ -1142,7 +1141,7 @@ def __identity_read_compartments(self): # Need to convert for raw output for compartment in self.__compartments: - debug("__identity_read_compartments: Getting Compartments:" + compartment.name) + debug("__identity_read_compartments: Getting Compartments: " + compartment.name) deep_link = self.__oci_compartment_uri + compartment.id record = { 'id': compartment.id, @@ -1187,7 +1186,7 @@ def __identity_read_compartments(self): return self.__compartments except Exception as e: - debug("__identity_read_compartments: Error Getting Compartments:" + compartment.name) + debug("__identity_read_compartments: Error Getting Compartments: " + compartment.name) self.__errors.append({"id" : "__identity_read_compartments", "error" : str(e)}) raise RuntimeError( "Error in identity_read_compartments: " + str(e.args)) @@ -1203,7 +1202,7 @@ def __identity_read_domains(self): # Finding all Identity Domains in the tenancy for compartment in self.__compartments: try: - debug("__identity_read_domains: Getting Identity Domains for Compartment :" + str(compartment.name)) + debug("__identity_read_domains: Getting Identity Domains for Compartment: " + str(compartment.name)) raw_identity_domains += oci.pagination.list_call_get_all_results( self.__regions[self.__home_region]['identity_client'].list_domains, @@ -1212,14 +1211,14 @@ def __identity_read_domains(self): ).data except Exception as e: - debug("__identity_read_domains: Exception collecting Identity Domains \n" + str(e)) + debug("__identity_read_domains: Exception collecting Identity Domains\n" + str(e)) # If this fails the tenancy likely doesn't have identity domains or the permissions are off for domain in raw_identity_domains: debug("__identity_read_domains: Getting 
password policy for domain: " + domain.display_name) - domain_dict = oci.util.to_dict(domain) + domain_dict = oci.util.to_dict(domain) try: - debug("__identity_read_domains: Getting Identity Domain Password Policy for :" + domain.display_name) + debug("__identity_read_domains: Getting Identity Domain Password Policy for: " + domain.display_name) idcs_url = domain.url + "/admin/v1/PasswordPolicies/PasswordPolicy" raw_pwd_policy_resp = requests.get(url=idcs_url, auth=self.__signer) raw_pwd_policy_dict = json.loads(raw_pwd_policy_resp.content) @@ -1257,42 +1256,44 @@ def __identity_read_groups_and_membership(self): debug("processing __identity_read_groups_and_membership for Identity Domains Enabled Tenancy") for identity_domain in self.__identity_domains: debug("processing __identity_read_groups_and_membership for Identity Domain: " + identity_domain['display_name']) + id_domain_deep_link = self.__oci_identity_domains_uri + identity_domain['id'] try: groups_data = self.__identity_domains_get_all_results(func=identity_domain['IdentityDomainClient'].list_groups, args={}) for grp in groups_data: debug("\t__identity_read_groups_and_membership: reading group data " + str(grp.display_name)) grp_deep_link = self.__oci_identity_domains_uri + identity_domain['id'] + "/groups/" + grp.ocid - for grp in groups_data: - if not grp.members: - debug("\t\t__identity_read_groups_and_membership: Adding group with no members " + str(grp.display_name)) - + if not grp.members: + debug("\t\t__identity_read_groups_and_membership: Adding group with no members " + str(grp.display_name)) + + group_record = { + "id": grp.ocid, + "name": grp.display_name, + "deep_link": self.__generate_csv_hyperlink(grp_deep_link, grp.display_name), + "domain_deeplink" : self.__generate_csv_hyperlink(id_domain_deep_link, identity_domain['display_name']), + "description": grp.urn_ietf_params_scim_schemas_oracle_idcs_extension_group_group.description if grp.urn_ietf_params_scim_schemas_oracle_idcs_extension_group_group else None, + "time_created" : self.get_date_iso_format(grp.meta.created), + "user_id": "", + "user_id_link": "" + } + # Adding a record per empty group + self.__groups_to_users.append(group_record) + else: + # For groups with members print one record per user per group + for member in grp.members: + debug("\t__identity_read_groups_and_membership: reading members data in group" + str(grp.display_name)) + user_deep_link = self.__oci_identity_domains_uri + identity_domain['id'] + "/users/" + member.ocid group_record = { - "id": grp.ocid, + "id": grp.id, "name": grp.display_name, "deep_link": self.__generate_csv_hyperlink(grp_deep_link, grp.display_name), + "domain_deeplink" : self.__generate_csv_hyperlink(id_domain_deep_link, identity_domain['display_name']), "description": grp.urn_ietf_params_scim_schemas_oracle_idcs_extension_group_group.description if grp.urn_ietf_params_scim_schemas_oracle_idcs_extension_group_group else None, "time_created" : self.get_date_iso_format(grp.meta.created), - "user_id": "", - "user_id_link": "" + "user_id": member.ocid, + "user_id_link": self.__generate_csv_hyperlink(user_deep_link, member.name) } - # Adding a record per empty group + # Adding a record per user to group self.__groups_to_users.append(group_record) - else: - # For groups with members print one record per user per group - for member in grp.members: - debug("\t__identity_read_groups_and_membership: reading members data in group" + str(grp.display_name)) - user_deep_link = self.__oci_identity_domains_uri + identity_domain['id'] + 
"/users/" + member.ocid - group_record = { - "id": grp.id, - "name": grp.display_name, - "deep_link": self.__generate_csv_hyperlink(grp_deep_link, grp.display_name), - "description": grp.urn_ietf_params_scim_schemas_oracle_idcs_extension_group_group.description if grp.urn_ietf_params_scim_schemas_oracle_idcs_extension_group_group else None, - "time_created" : self.get_date_iso_format(grp.meta.created), - "user_id": member.ocid, - "user_id_link": self.__generate_csv_hyperlink(user_deep_link, member.name) - } - # Adding a record per user to group - self.__groups_to_users.append(group_record) except Exception as e: self.__errors.append({"id" : "__identity_read_groups_and_membership", "error" : str(e)}) @@ -1322,7 +1323,9 @@ def __identity_read_groups_and_membership(self): "id": grp.id, "name": grp.name, "deep_link": self.__generate_csv_hyperlink(grp_deep_link, grp.name), + "domain_deeplink" : "", "description": grp.description, + "domain_deeplink" : "", "lifecycle_state": grp.lifecycle_state, "time_created": grp.time_created.strftime(self.__iso_time_format), "user_id": "", @@ -1338,6 +1341,7 @@ def __identity_read_groups_and_membership(self): "id": grp.id, "name": grp.name, "deep_link": self.__generate_csv_hyperlink(grp_deep_link, grp.name), + "domain_deeplink" : "", "description": grp.description, "lifecycle_state": grp.lifecycle_state, "time_created": grp.time_created.strftime(self.__iso_time_format), @@ -1390,108 +1394,123 @@ def __identity_domains_get_all_results(self, func, args): ########################################################################## def __identity_read_users(self): debug(f'__identity_read_users: Getting User data for Identity Domains: {str(self.__identity_domains_enabled)}') - if self.__identity_domains_enabled: - for identity_domain in self.__identity_domains: + try: + if self.__identity_domains_enabled: + for identity_domain in self.__identity_domains: + try: + users_data = self.__identity_domains_get_all_results(func=identity_domain['IdentityDomainClient'].list_users, + args={}) + # Adding record to the users + for user in users_data: + deep_link = self.__oci_identity_domains_uri + identity_domain['id'] + "/users/" + user.ocid + id_domain_deep_link = self.__oci_identity_domains_uri + identity_domain['id'] + record = { + 'id': user.ocid, + 'domain_deeplink' : self.__generate_csv_hyperlink(id_domain_deep_link, identity_domain['display_name']), + 'name': user.user_name, + 'deep_link': self.__generate_csv_hyperlink(deep_link, user.user_name), + 'defined_tags': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_oci_tags.defined_tags if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_oci_tags else None, + 'description': user.description, + 'email': user.emails[0].value if user.emails else None, + 'email_verified': user.emails[0].verified if user.emails else None, + 'external_identifier': user.external_id, + 'is_federated': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_user_user.is_federated_user, + 'is_mfa_activated': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_mfa_user.mfa_status if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_mfa_user else None, + 'lifecycle_state': user.active, + 'time_created': user.meta.created, + 'can_use_api_keys': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_api_keys if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'can_use_auth_tokens': 
user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_auth_tokens if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'can_use_console_password': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_console_password if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'can_use_customer_secret_keys': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_customer_secret_keys if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'can_use_db_credentials': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_db_credentials if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'can_use_o_auth2_client_credentials': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_o_auth2_client_credentials if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'can_use_smtp_credentials': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_smtp_credentials if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'groups': [] + } + # Adding Groups to the user + for group in self.__groups_to_users: + if user.ocid == group['user_id']: + record['groups'].append(group['name']) + if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_user_credentials_user: + debug("__identity_read_users: Collecting user API Key for user: " + str(user.user_name)) + record['api_keys'] = self.__identity_read_user_api_key(user_ocid=user.ocid, identity_domain=identity_domain) + record['auth_tokens'] = self.__identity_read_user_auth_token(user.ocid, identity_domain=identity_domain) + record['customer_secret_keys'] = self.__identity_read_user_customer_secret_key(user.ocid, identity_domain=identity_domain) + record['database_passowrds'] = self.__identity_read_user_database_password(user.ocid,identity_domain=identity_domain) + else: + debug("__identity_read_users: skipping user API Key collection for user: " + str(user.user_name)) + record['api_keys'] = None + record['auth_tokens'] = None + record['customer_secret_keys'] = None + record['database_passowrds'] = None + self.__users.append(record) + + except Exception as e: + debug("__identity_read_users: Identity Domains are : " + str(self.__identity_domains_enabled)) + self.__errors.append({'id' : "__identity_read_users", 'error' : str(e)}) + raise RuntimeError( + "Error in __identity_read_users: " + str(e)) + + print("\tProcessed " + str(len(self.__users)) + " Users") + return self.__users + + else: try: - users_data = self.__identity_domains_get_all_results(func=identity_domain['IdentityDomainClient'].list_users, - args={}) + # Getting all users in the Tenancy + users_data = oci.pagination.list_call_get_all_results( + self.__regions[self.__home_region]['identity_client'].list_users, + compartment_id=self.__tenancy.id + ).data + # Adding record to the users for user in users_data: - deep_link = self.__oci_identity_domains_uri + identity_domain['id'] + "/users/" + user.ocid + deep_link = self.__oci_users_uri + user.id record = { - 'id': user.ocid, - 'name': user.user_name, - 'deep_link': self.__generate_csv_hyperlink(deep_link, user.user_name), - 'defined_tags': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_oci_tags.defined_tags if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_oci_tags 
else None, + 'id': user.id, + 'domain_deeplink' : "", + 'name': user.name, + 'deep_link': self.__generate_csv_hyperlink(deep_link, user.name), + 'defined_tags': user.defined_tags, 'description': user.description, - 'email': user.emails[0].value if user.emails else None, - 'email_verified': user.emails[0].verified if user.emails else None, - 'external_identifier': user.external_id, - 'identity_provider_id': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_user_user.provider, - 'is_mfa_activated': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_mfa_user.mfa_status if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_mfa_user else None, - 'lifecycle_state': user.active, - 'time_created': user.meta.created, - 'can_use_api_keys': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_api_keys if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, - 'can_use_auth_tokens': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_auth_tokens if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, - 'can_use_console_password': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_console_password if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, - 'can_use_customer_secret_keys': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_customer_secret_keys if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, - 'can_use_db_credentials': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_db_credentials if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, - 'can_use_o_auth2_client_credentials': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_o_auth2_client_credentials if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, - 'can_use_smtp_credentials': user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user.can_use_smtp_credentials if user.urn_ietf_params_scim_schemas_oracle_idcs_extension_capabilities_user else None, + 'email': user.email, + 'email_verified': user.email_verified, + 'external_identifier': user.external_identifier, + 'is_federated': True if user.identity_provider_id is not None else False, + 'is_mfa_activated': user.is_mfa_activated, + 'lifecycle_state': True if user.lifecycle_state == 'ACTIVE' else False, + 'time_created': user.time_created.strftime(self.__iso_time_format), + 'can_use_api_keys': user.capabilities.can_use_api_keys, + 'can_use_auth_tokens': user.capabilities.can_use_auth_tokens, + 'can_use_console_password': user.capabilities.can_use_console_password, + 'can_use_customer_secret_keys': user.capabilities.can_use_customer_secret_keys, + 'can_use_db_credentials': user.capabilities.can_use_db_credentials, + 'can_use_o_auth2_client_credentials': user.capabilities.can_use_o_auth2_client_credentials, + 'can_use_smtp_credentials': user.capabilities.can_use_smtp_credentials, 'groups': [] } # Adding Groups to the user for group in self.__groups_to_users: - if user.ocid == group['user_id']: + if user.id == group['user_id']: record['groups'].append(group['name']) - record['api_keys'] = self.__identity_read_user_api_key(user_ocid=user.ocid, identity_domain=identity_domain) - record['auth_tokens'] = self.__identity_read_user_auth_token(user.ocid, identity_domain=identity_domain) 
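The rewritten __identity_read_users normalizes identity-domain (SCIM) users and legacy IAM users into the same record shape, so the later CIS checks can key off a single 'is_federated' flag instead of 'identity_provider_id'. A minimal sketch of that derivation, using simplified stand-in user objects rather than the toolkit's own classes:

def _is_federated(user, identity_domains_enabled):
    if identity_domains_enabled:
        # Identity-domain users carry the flag on a SCIM extension attribute
        ext = user.urn_ietf_params_scim_schemas_oracle_idcs_extension_user_user
        return bool(ext.is_federated_user) if ext else False
    # Legacy IAM users are federated when they are backed by an identity provider
    return user.identity_provider_id is not None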
- record['customer_secret_keys'] = self.__identity_read_user_customer_secret_key(user.ocid, identity_domain=identity_domain) - record['database_passowrds'] = self.__identity_read_user_database_password(user.ocid,identity_domain=identity_domain) + record['api_keys'] = self.__identity_read_user_api_key(user.id) + record['auth_tokens'] = self.__identity_read_user_auth_token( + user.id) + record['customer_secret_keys'] = self.__identity_read_user_customer_secret_key( + user.id) + record['database_passowrds'] = self.__identity_read_user_database_password(user.id) self.__users.append(record) + print("\tProcessed " + str(len(self.__users)) + " Users") + return self.__users except Exception as e: - debug("__identity_read_users: Identity Domains are : " + str(self.__identity_domains_enabled)) - self.__errors.append({'id' : "__identity_read_users", 'error' : str(e)}) + debug("__identity_read_users: Error is: " + str(e)) + self.__errors.append({"id" : "__identity_read_users", "error" : str(e)}) raise RuntimeError( "Error in __identity_read_users: " + str(e)) - - print("\tProcessed " + str(len(self.__users)) + " Users") - return self.__users - - else: - try: - # Getting all users in the Tenancy - users_data = oci.pagination.list_call_get_all_results( - self.__regions[self.__home_region]['identity_client'].list_users, - compartment_id=self.__tenancy.id - ).data - - # Adding record to the users - for user in users_data: - deep_link = self.__oci_users_uri + user.id - record = { - 'id': user.id, - 'name': user.name, - 'deep_link': self.__generate_csv_hyperlink(deep_link, user.name), - 'defined_tags': user.defined_tags, - 'description': user.description, - 'email': user.email, - 'email_verified': user.email_verified, - 'external_identifier': user.external_identifier, - 'identity_provider_id': user.identity_provider_id, - 'is_mfa_activated': user.is_mfa_activated, - 'lifecycle_state': True if user.lifecycle_state == 'ACTIVE' else False, - 'time_created': user.time_created.strftime(self.__iso_time_format), - 'can_use_api_keys': user.capabilities.can_use_api_keys, - 'can_use_auth_tokens': user.capabilities.can_use_auth_tokens, - 'can_use_console_password': user.capabilities.can_use_console_password, - 'can_use_customer_secret_keys': user.capabilities.can_use_customer_secret_keys, - 'can_use_db_credentials': user.capabilities.can_use_db_credentials, - 'can_use_o_auth2_client_credentials': user.capabilities.can_use_o_auth2_client_credentials, - 'can_use_smtp_credentials': user.capabilities.can_use_smtp_credentials, - 'groups': [] - } - # Adding Groups to the user - for group in self.__groups_to_users: - if user.id == group['user_id']: - record['groups'].append(group['name']) - - record['api_keys'] = self.__identity_read_user_api_key(user.id) - record['auth_tokens'] = self.__identity_read_user_auth_token( - user.id) - record['customer_secret_keys'] = self.__identity_read_user_customer_secret_key( - user.id) - record['database_passowrds'] = self.__identity_read_user_database_password(user.id) - self.__users.append(record) - print("\tProcessed " + str(len(self.__users)) + " Users") - return self.__users - - except Exception as e: - debug("__identity_read_users: User ID is: " + str(user)) - raise RuntimeError( - "Error in __identity_read_users: " + str(e.args)) + except Exception as e: + raise RuntimeError( + "Error in __identity_read_users: " + str(e.args)) ########################################################################## # Load user api keys 
########################################################################## @@ -1678,7 +1697,7 @@ def __identity_read_user_database_password(self, user_ocid, identity_domain=None ########################################################################## def __identity_read_tenancy_policies(self): try: - debug("__identity_read_tenancy_policies: Getting Tenancy policies :") + debug("__identity_read_tenancy_policies: Getting Tenancy policies: ") policies_data = oci.pagination.list_call_get_all_results( self.__regions[self.__home_region]['search_client'].search_resources, search_details=oci.resource_search.models.StructuredSearchDetails( @@ -1686,7 +1705,7 @@ def __identity_read_tenancy_policies(self): ).data for policy in policies_data: - debug("__identity_read_tenancy_policies: Reading Tenancy policies : " + policy.display_name) + debug("__identity_read_tenancy_policies: Reading Tenancy policies: " + policy.display_name) deep_link = self.__oci_policies_uri + policy.identifier record = { "id": policy.identifier, @@ -1702,7 +1721,7 @@ def __identity_read_tenancy_policies(self): return self.__policies except Exception as e: - debug("__identity_read_tenancy_policies: Exception reading Tenancy policies : " + policy.display_name) + debug("__identity_read_tenancy_policies: Exception reading Tenancy policies: " + policy.display_name) self.__errors.append({"id" : "__identity_read_tenancy_policies", "error" : str(e)}) raise RuntimeError("Error in __identity_read_tenancy_policies: " + str(e.args)) @@ -1716,11 +1735,13 @@ def __identity_read_dynamic_groups(self): for identity_domain in self.__identity_domains: dynamic_groups_data = self.__identity_domains_get_all_results(func=identity_domain['IdentityDomainClient'].list_dynamic_resource_groups, args={}) + id_domain_deep_link = self.__oci_identity_domains_uri + identity_domain['id'] for dynamic_group in dynamic_groups_data: debug("__identity_read_dynamic_groups: reading dynamic groups" + str(dynamic_group.display_name)) deep_link = self.__oci_identity_domains_uri + "/domains/" + identity_domain['id'] + "/dynamic-groups/" + dynamic_group.id record = oci.util.to_dict(dynamic_group) record['deep_link'] = self.__generate_csv_hyperlink(deep_link, dynamic_group.display_name) + record['domain_deeplink'] = self.__generate_csv_hyperlink(id_domain_deep_link, identity_domain['display_name']) self.__dynamic_groups.append(record) else: @@ -1733,7 +1754,7 @@ def __identity_read_dynamic_groups(self): debug("__identity_read_dynamic_groups: reading dynamic groups" + str(dynamic_group.name)) record = oci.util.to_dict(dynamic_group) record['deep_link'] = self.__generate_csv_hyperlink(deep_link, dynamic_group.name) - + record['domain_deeplink'] = None self.__dynamic_groups.append(record) print("\tProcessed " + str(len(self.__dynamic_groups)) + " Dynamic Groups") @@ -1749,9 +1770,9 @@ def __identity_read_dynamic_groups(self): ############################################ def __identity_read_availability_domains(self): try: - debug("__identity_read_availability_domains: Getting Availability Domains for regions :") + debug("__identity_read_availability_domains: Getting Availability Domains for regions:") for region_key, region_values in self.__regions.items(): - debug("__identity_read_availability_domains: reading Availability Domains for regions :" +region_key) + debug("__identity_read_availability_domains: reading Availability Domains for regions: " +region_key) region_values['availability_domains'] = oci.pagination.list_call_get_all_results( 
region_values['identity_client'].list_availability_domains, compartment_id=self.__tenancy.id @@ -1759,8 +1780,8 @@ def __identity_read_availability_domains(self): print("\tProcessed " + str(len(region_values['availability_domains'])) + " Availability Domains in " + region_key) except Exception as e: - debug("__identity_read_availability_domains: reading availability domain" + str(region_key)) - self.__errors.append({"id" : "__identity_read_availability_domains" + "_" + str(region_key), "error" : str(e)}) + debug("__identity_read_availability_domains: reading availability domain " + str(region_key)) + self.__errors.append({"id": "__identity_read_availability_domains" + "_" + str(region_key), "error": str(e)}) raise RuntimeError( "Error in __identity_read_availability_domains: " + str(e.args)) @@ -2635,7 +2656,9 @@ def __network_read_ip_sec_connections(self): ############################################ def __network_topology_dump(self): debug("__network_topology_dump: Starting") - + if type(self.__signer) == oci.auth.signers.InstancePrincipalsDelegationTokenSigner: + self.__errors.append({"id": "__network_topology_dump", "error": "Delegated Tokens via Cloud Shell not supported." }) + return def api_function(region_key, region_values, tenancy_id): try: get_vcn_topology_response = region_values['topology_client'].get_networking_topology( @@ -3379,25 +3402,6 @@ def __budget_read_budgets(self): raise RuntimeError( "Error in __budget_read_budgets " + str(e.args)) - ########################################################################## - # Audit Configuration - ########################################################################## - def __audit_read_tenancy_audit_configuration(self): - # Pulling the Audit Configuration - try: - self.__audit_retention_period = self.__regions[self.__home_region]['audit_client'].get_configuration( - self.__tenancy.id).data.retention_period_days - except Exception as e: - if "NotAuthorizedOrNotFound" in str(e): - self.__audit_retention_period = -1 - print("\t*** Access to audit retention requires the user to be part of the Administrator group ***") - self.__errors.append({"id" : self.__tenancy.id, "error" : "*** Access to audit retention requires the user to be part of the Administrator group ***"}) - else: - raise RuntimeError("Error in __audit_read_tenancy_audit_configuration " + str(e.args)) - - print("\tProcessed Audit Configuration.") - return self.__audit_retention_period - ########################################################################## # Cloud Guard Configuration ########################################################################## @@ -3648,7 +3652,7 @@ def __search_resources_in_root_compartment(self): # query = [] # resources_in_root_data = [] # record = [] - query_non_compliant = "query VCN, instance, volume, filesystem, bucket, autonomousdatabase, database, dbsystem resources where compartmentId = '" + self.__tenancy.id + "'" + query_non_compliant = "query VCN, instance, volume, bootvolume, filesystem, bucket, autonomousdatabase, database, dbsystem resources where compartmentId = '" + self.__tenancy.id + "'" query_all_resources = "query all resources where compartmentId = '" + self.__tenancy.id + "'" # resources_in_root_data = self.__search_run_structured_query(query) @@ -3687,7 +3691,7 @@ def __search_resources_in_root_compartment(self): } self.cis_foundations_benchmark_2_0['6.2']['Total'].append(record) except: - self.__errors.append({"id" : "search_resources_in_root_compartment Invalid OCID", "error" : str(item)}) + 
self.__errors.append({"id": "search_resources_in_root_compartment Invalid OCID", "error" : str(item)}) debug(f'__search_resources_in_root_compartment: Invalid OCID: {str(item)}') except Exception as e: @@ -3772,7 +3776,7 @@ def __core_instance_read_compute(self): # Returning Instances - print("\tProcessed " + str(len(self.__Instance)) + " Service Connectors") + print("\tProcessed " + str(len(self.__Instance)) + " Compute Instances") return self.__service_connectors except Exception as e: raise RuntimeError("Error in __core_instance_read_compute " + str(e.args)) @@ -3808,7 +3812,7 @@ def __report_cis_analyze_tenancy_data(self): # 1.3 Check - May want to add a service check for policy in self.__policies: - if policy['name'].upper() != "Tenant Admin Policy".upper() and policy['name'].upper() != "PSM-root-policy".upper(): + if policy['name'].lower() not in ['tenant admin policy', 'psm-root-policy']: for statement in policy['statements']: if ("allow group".upper() in statement.upper() and "tenancy".upper() in statement.upper() and ("to manage ".upper() in statement.upper() or "to use".upper() in statement.upper()) and ("all-resources".upper() in statement.upper() or (" groups ".upper() in statement.upper() and " users ".upper() in statement.upper()))): split_statement = statement.split("where") @@ -3877,7 +3881,7 @@ def __report_cis_analyze_tenancy_data(self): # 1.7 Check - Local Users w/o MFA for user in self.__users: - if user['identity_provider_id'] is None and user['can_use_console_password'] and not (user['is_mfa_activated']) and user['lifecycle_state']: + if not(user['is_federated']) and user['can_use_console_password'] and not (user['is_mfa_activated']) and user['lifecycle_state']: self.cis_foundations_benchmark_2_0['1.7']['Status'] = False self.cis_foundations_benchmark_2_0['1.7']['Findings'].append( user) @@ -3996,26 +4000,25 @@ def __report_cis_analyze_tenancy_data(self): # CIS 1.15 Check - Ensure storage service-level admins cannot delete resources they manage. 
# Iterating through all policies for policy in self.__policies: - if policy['name'].upper() != "Tenant Admin Policy".upper() and policy['name'].upper() != "PSM-root-policy": + if policy['name'].lower() not in ['tenant admin policy', 'psm-root-policy']: for statement in policy['statements']: for resource in self.cis_iam_checks['1.15']: - if "allow group".upper() in statement.upper() and "manage".upper() in statement.upper() and resource.upper() in statement.upper(): + if "allow group".upper() in statement.upper() and "to manage ".upper() in statement.upper() and resource.upper() in statement.upper(): split_statement = statement.split("where") if len(split_statement) == 2: clean_where_clause = split_statement[1].upper().replace(" ", "").replace("'", "") if all(permission.upper() in clean_where_clause for permission in self.cis_iam_checks['1.15'][resource]) and \ not(all(permission.upper() in clean_where_clause for permission in self.cis_iam_checks['1.15-storage-admin'][resource])): - debug("__report_cis_analyze_tenancy_data no permissions to delete storage : " + str(policy['name'])) - + debug("__report_cis_analyze_tenancy_data no permissions to delete storage: " + str(policy['name'])) pass # Checking if this is the Storage admin with allowed elif all(permission.upper() in clean_where_clause for permission in self.cis_iam_checks['1.15-storage-admin'][resource]) and \ not(all(permission.upper() in clean_where_clause for permission in self.cis_iam_checks['1.15'][resource])): - debug("__report_cis_analyze_tenancy_data storage admin policy is : " + str(policy['name'])) + debug("__report_cis_analyze_tenancy_data storage admin policy is: " + str(policy['name'])) pass else: self.cis_foundations_benchmark_2_0['1.15']['Findings'].append(policy) - debug("__report_cis_analyze_tenancy_data else policy is /n: " + str(policy['name'])) + debug("__report_cis_analyze_tenancy_data else policy is\n: " + str(policy['name'])) else: self.cis_foundations_benchmark_2_0['1.15']['Findings'].append(policy) @@ -4025,7 +4028,7 @@ def __report_cis_analyze_tenancy_data(self): else: self.cis_foundations_benchmark_2_0['1.15']['Status'] = True - # CIS Total 1.14 Adding - All IAM Policies for to CIS Total + # CIS Total 1.15 Adding - All IAM Policies for to CIS Total self.cis_foundations_benchmark_2_0['1.15']['Total'] = self.__policies # CIS 2.1, 2.2, & 2.5 Check - Security List Ingress from 0.0.0.0/0 on ports 22, 3389 @@ -4247,17 +4250,21 @@ def __report_cis_analyze_tenancy_data(self): # Generating list of keys for key in self.__kms_keys: - if self.kms_key_time_max_datetime and self.kms_key_time_max_datetime >= datetime.datetime.strptime(key['currentKeyVersion_time_created'], self.__iso_time_format): - self.cis_foundations_benchmark_2_0['4.16']['Status'] = False - self.cis_foundations_benchmark_2_0['4.16']['Findings'].append( - key) - if self.kms_key_time_max_datetime is None: - self.cis_foundations_benchmark_2_0['4.16']['Status'] = False - self.cis_foundations_benchmark_2_0['4.16']['Findings'].append( - key) - - - # CIS Check 3.16 Total - Adding Key to total + try: + if self.kms_key_time_max_datetime and self.kms_key_time_max_datetime >= datetime.datetime.strptime(key['currentKeyVersion_time_created'], self.__iso_time_format): + self.cis_foundations_benchmark_2_0['4.16']['Status'] = False + self.cis_foundations_benchmark_2_0['4.16']['Findings'].append( + key) + if self.kms_key_time_max_datetime is None: + self.cis_foundations_benchmark_2_0['4.16']['Status'] = False + 
self.cis_foundations_benchmark_2_0['4.16']['Findings'].append( + key) + except: + self.cis_foundations_benchmark_2_0['4.16']['Status'] = False + self.cis_foundations_benchmark_2_0['4.16']['Findings'].append( + key) + + # CIS Check 4.16 Total - Adding Key to total self.cis_foundations_benchmark_2_0['4.16']['Total'].append(key) # CIS Check 4.17 - Object Storage with Logs @@ -4909,12 +4916,14 @@ def __report_generate_cis_report(self, level): # Generating Summary report CSV print_header("Writing CIS reports to CSV") summary_files = [] - summary_file_name = self.__print_to_csv_file( - self.__report_directory, "cis", "summary_report", summary_report) + summary_file_name = self.__print_to_csv_file("cis", "summary_report", summary_report) summary_files.append(summary_file_name) - summary_file_name = self.__report_generate_html_summary_report( - self.__report_directory, "cis", "html_summary_report", summary_report) + if self.__report_summary_json: + summary_file_name = self.__print_to_json_file("cis", "summary_report", summary_report) + summary_files.append(summary_file_name) + + summary_file_name = self.__report_generate_html_summary_report("cis", "html_summary_report", summary_report) summary_files.append(summary_file_name) # Outputing to a bucket if I have one @@ -4925,8 +4934,7 @@ def __report_generate_cis_report(self, level): for key, recommendation in self.cis_foundations_benchmark_2_0.items(): if recommendation['Level'] <= level: - report_file_name = self.__print_to_csv_file( - self.__report_directory, "cis", recommendation['section'] + "_" + recommendation['recommendation_#'], recommendation['Findings']) + report_file_name = self.__print_to_csv_file("cis", recommendation['section'] + "_" + recommendation['recommendation_#'], recommendation['Findings']) if report_file_name and self.__output_bucket: self.__os_copy_report_to_object_storage( self.__output_bucket, report_file_name) @@ -4934,11 +4942,11 @@ def __report_generate_cis_report(self, level): ########################################################################## # Generates an HTML report ########################################################################## - def __report_generate_html_summary_report(self, report_directory, header, file_subject, data): + def __report_generate_html_summary_report(self, header, file_subject, data): try: # Creating report directory - if not os.path.isdir(report_directory): - os.mkdir(report_directory) + if not os.path.isdir(self.__report_directory): + os.mkdir(self.__report_directory) except Exception as e: raise Exception("Error in creating report directory: " + str(e.args)) @@ -4948,11 +4956,10 @@ def __report_generate_html_summary_report(self, report_directory, header, file_s if len(data) == 0: return None - # get the file name of the CSV - + # get the file name of the HTML file_name = header + "_" + file_subject file_name = (file_name.replace(" ", "_")).replace(".", "-").replace("_-_", "_") + ".html" - file_path = os.path.join(report_directory, file_name) + file_path = os.path.join(self.__report_directory, f'{self.__report_prefix}{file_name}') # add report_datetimeto each dictionary result = [dict(item, extract_date=self.start_time_str) @@ -5238,8 +5245,7 @@ def __report_generate_obp_report(self): print_header("Writing Oracle Best Practices reports to CSV") - summary_report_file_name = self.__print_to_csv_file( - self.__report_directory, "obp", "OBP_Summary", obp_summary_report) + summary_report_file_name = self.__print_to_csv_file("obp", "OBP_Summary", obp_summary_report) if 
summary_report_file_name and self.__output_bucket: self.__os_copy_report_to_object_storage( @@ -5247,13 +5253,11 @@ def __report_generate_obp_report(self): # Printing Findings to CSV for key, value in self.obp_foundations_checks.items(): - report_file_name = self.__print_to_csv_file( - self.__report_directory, "obp", key + "_Findings", value['Findings']) + report_file_name = self.__print_to_csv_file("obp", key + "_Findings", value['Findings']) # Printing OBPs to CSV for key, value in self.obp_foundations_checks.items(): - report_file_name = self.__print_to_csv_file( - self.__report_directory, "obp", key + "_Best_Practices", value['OBP']) + report_file_name = self.__print_to_csv_file("obp", key + "_Best_Practices", value['OBP']) if report_file_name and self.__output_bucket: self.__os_copy_report_to_object_storage( @@ -5289,7 +5293,6 @@ def __collect_tenancy_data(self): self.__identity_read_users, self.__identity_read_tenancy_password_policy, self.__identity_read_dynamic_groups, - self.__audit_read_tenancy_audit_configuration, self.__identity_read_availability_domains, self.__identity_read_tag_defaults, self.__identity_read_tenancy_policies, @@ -5385,146 +5388,61 @@ def __report_generate_raw_data_output(self): # List to store output reports if copying to object storage is required list_report_file_names = [] - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_groups_and_membership", self.__groups_to_users) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_domains", self.__identity_domains) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_users", self.__users) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_policies", self.__policies) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_dynamic_groups", self.__dynamic_groups) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_tags", self.__tag_defaults) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "identity_compartments", self.__raw_compartment) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_security_groups", self.__network_security_groups) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_security_lists", self.__network_security_lists) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_subnets", self.__network_subnets) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "autonomous_databases", self.__autonomous_databases) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "analytics_instances", self.__analytics_instances) - list_report_file_names.append(report_file_name) 
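The long run of repeated __print_to_csv_file calls below is being replaced by data-driven loops: one mapping of report name to rows per output format, written by a single writer that builds the file path from self.__report_directory and self.__report_prefix. A minimal sketch of the pattern, with the writers passed in as plain callables (hypothetical helper, not the toolkit's own method names):

def _write_raw_reports(write_csv, write_json, csv_data, json_data):
    written = []
    for name, rows in csv_data.items():        # iterate items() so key and data stay paired
        written.append(write_csv("raw_data", name, rows))
    for name, payload in json_data.items():
        written.append(write_json("raw_data", name, payload))
    return [f for f in written if f]           # writers return None for empty data sets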
- - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "integration_instances", self.__integration_instances) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "event_rules", self.__event_rules) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "log_groups_and_logs", self.__logging_list) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "object_storage_buckets", self.__buckets) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "boot_volumes", self.__boot_volumes) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "block_volumes", self.__block_volumes) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "file_storage_system", self.__file_storage_system) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "keys_and_vaults", self.__kms_keys) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "ons_subscriptions", self.__subscriptions) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "budgets", self.__budgets) - list_report_file_names.append(report_file_name) - - # Converting a one to one dict to a list - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "service_connectors", list(self.__service_connectors.values())) - list_report_file_names.append(report_file_name) - - # Converting a dict that is one to a list to a flat list - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_fastconnects", (list(itertools.chain.from_iterable(self.__network_fastconnects.values())))) - list_report_file_names.append(report_file_name) - - # Converting a dict that is one to a list to a flat list - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_ipsec_connections", list(itertools.chain.from_iterable(self.__network_ipsec_connections.values()))) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_drgs", self.__raw_network_drgs) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "cloud_guard_target", list(self.__cloud_guard_targets.values())) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "regions", self.__raw_regions) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "network_drg_attachments", list(itertools.chain.from_iterable(self.__network_drg_attachments.values()))) - list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_csv_file( - self.__report_directory, "raw_data", "instances", self.__Instance) - 
list_report_file_names.append(report_file_name) - - report_file_name = self.__print_to_json_file( - self.__report_directory, "raw_data", "all_resources", self.__all_resources_json) - list_report_file_names.append(report_file_name) + raw_csv_files = { + "identity_groups_and_membership": self.__groups_to_users, + "identity_domains": self.__identity_domains, + "identity_users": self.__users, + "identity_policies": self.__policies, + "identity_dynamic_groups": self.__dynamic_groups, + "identity_tags": self.__tag_defaults, + "identity_compartments": self.__raw_compartment, + "network_security_groups": self.__network_security_groups, + "network_security_lists": self.__network_security_lists, + "network_subnets": self.__network_subnets, + "autonomous_databases": self.__autonomous_databases, + "analytics_instances": self.__analytics_instances, + "integration_instances": self.__integration_instances, + "event_rules": self.__event_rules, + "log_groups_and_logs": self.__logging_list, + "object_storage_buckets": self.__buckets, + "boot_volumes": self.__boot_volumes, + "block_volumes": self.__block_volumes, + "file_storage_system": self.__file_storage_system, + "keys_and_vaults": self.__kms_keys, + "ons_subscriptions": self.__subscriptions, + "budgets": self.__budgets, + "service_connectors": list(self.__service_connectors.values()), + "network_fastconnects": list(itertools.chain.from_iterable(self.__network_fastconnects.values())), + "network_ipsec_connections": list(itertools.chain.from_iterable(self.__network_ipsec_connections.values())), + "network_drgs": self.__raw_network_drgs, + "cloud_guard_target": list(self.__cloud_guard_targets.values()), + "regions": self.__raw_regions, + "network_drg_attachments": list(itertools.chain.from_iterable(self.__network_drg_attachments.values())), + "instances": self.__Instance + } + for key in raw_csv_files: + rfn = self.__print_to_csv_file('raw_data', key, raw_csv_files[key]) + list_report_file_names.append(rfn) - report_file_name = self.__print_to_json_file( - self.__report_directory, "raw_data", "oci_network_topologies", oci.util.to_dict(self.__network_topology_json)) - list_report_file_names.append(report_file_name) + raw_json_files = { + "all_resources": self.__all_resources_json, + "oci_network_topologies": oci.util.to_dict(self.__network_topology_json) + } + for key in raw_json_files: + rfn = self.__print_to_json_file('raw_data', key, raw_json_files[key]) + list_report_file_names.append(rfn) - report_file_name = self.__print_to_pkl_file( - self.__report_directory, "raw_data", "oci_network_topologies", self.__network_topology_json) - list_report_file_names.append(report_file_name) + raw_pkl_files = { + "oci_network_topologies": self.__network_topology_json + } + for key in raw_pkl_files: + rfn = self.__print_to_pkl_file('raw_data', key, raw_json_files[key]) + list_report_file_names.append(rfn) if self.__output_bucket: for raw_report in list_report_file_names: if raw_report: - self.__os_copy_report_to_object_storage( - self.__output_bucket, raw_report) + self.__os_copy_report_to_object_storage(self.__output_bucket, raw_report) ########################################################################## # Copy Report to Object Storage @@ -5547,12 +5465,12 @@ def __os_copy_report_to_object_storage(self, bucketname, filename): ########################################################################## # Print to CSV ########################################################################## - def __print_to_csv_file(self, report_directory, header, file_subject, 
data): + def __print_to_csv_file(self, header, file_subject, data): debug("__print_to_csv_file: " + header + "_" + file_subject) try: # Creating report directory - if not os.path.isdir(report_directory): - os.mkdir(report_directory) + if not os.path.isdir(self.__report_directory): + os.mkdir(self.__report_directory) except Exception as e: raise Exception( @@ -5567,7 +5485,7 @@ def __print_to_csv_file(self, report_directory, header, file_subject, data): file_name = header + "_" + file_subject file_name = (file_name.replace(" ", "_")).replace(".", "-").replace("_-_", "_") + ".csv" - file_path = os.path.join(report_directory, file_name) + file_path = os.path.join(self.__report_directory, f'{self.__report_prefix}{file_name}') # add report_datetimeto each dictionary result = [dict(item, extract_date=self.start_time_str) @@ -5614,11 +5532,11 @@ def __print_to_csv_file(self, report_directory, header, file_subject, data): ########################################################################## # Print to JSON ########################################################################## - def __print_to_json_file(self, report_directory, header, file_subject, data): + def __print_to_json_file(self, header, file_subject, data): try: # Creating report directory - if not os.path.isdir(report_directory): - os.mkdir(report_directory) + if not os.path.isdir(self.__report_directory): + os.mkdir(self.__report_directory) except Exception as e: raise Exception( @@ -5634,7 +5552,7 @@ def __print_to_json_file(self, report_directory, header, file_subject, data): file_name = header + "_" + file_subject file_name = (file_name.replace(" ", "_") ).replace(".", "-").replace("_-_","_") + ".json" - file_path = os.path.join(report_directory, file_name) + file_path = os.path.join(self.__report_directory, f'{self.__report_prefix}{file_name}') # Serializing JSON to string json_object = json.dumps(data, indent=4) @@ -5661,11 +5579,11 @@ def __print_to_json_file(self, report_directory, header, file_subject, data): ########################################################################## # Print to PKL ########################################################################## - def __print_to_pkl_file(self, report_directory, header, file_subject, data): + def __print_to_pkl_file(self, header, file_subject, data): try: # Creating report directory - if not os.path.isdir(report_directory): - os.mkdir(report_directory) + if not os.path.isdir(self.__report_directory): + os.mkdir(self.__report_directory) except Exception as e: raise Exception( @@ -5681,7 +5599,7 @@ def __print_to_pkl_file(self, report_directory, header, file_subject, data): file_name = header + "_" + file_subject file_name = (file_name.replace(" ", "_") ).replace(".", "-").replace("_-_","_") + ".pkl" - file_path = os.path.join(report_directory, file_name) + file_path = os.path.join(self.__report_directory, f'{self.__report_prefix}{file_name}') # Writing to json file with open(file_path, 'wb') as pkl_file: @@ -5722,8 +5640,7 @@ def generate_reports(self, level=2): self.__report_generate_raw_data_output() if self.__errors: - error_report = self.__print_to_csv_file( - self.__report_directory, "error", "report", self.__errors) + error_report = self.__print_to_csv_file("error", "report", self.__errors) if self.__output_bucket: if error_report: @@ -5795,8 +5712,7 @@ def create_signer(file_location, config_profile, is_instance_principals, is_dele # check if file exist if env_config_file is None or env_config_section is None: - print( - "*** OCI_CONFIG_FILE and 
OCI_CONFIG_PROFILE env variables not found, abort. ***") + print("*** OCI_CONFIG_FILE and OCI_CONFIG_PROFILE env variables not found, abort. ***") print("") raise SystemExit @@ -5908,39 +5824,43 @@ def execute_report(): # Get Command Line Parser parser = argparse.ArgumentParser(formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=100, width=180)) parser.add_argument('-c', default="", dest='file_location', - help='OCI config file location') + help='OCI config file location.') parser.add_argument('-t', default="", dest='config_profile', - help='Config file section to use (tenancy profile) ') + help='Config file section to use (tenancy profile).') parser.add_argument('-p', default="", dest='proxy', - help='Set Proxy (i.e. www-proxy-server.com:80) ') + help='Set Proxy (i.e. www-proxy-server.com:80).') parser.add_argument('--output-to-bucket', default="", dest='output_bucket', - help='Set Output bucket name (i.e. my-reporting-bucket) ') + help='Set Output bucket name (i.e. my-reporting-bucket).') parser.add_argument('--report-directory', default=None, dest='report_directory', - help='Set Output report directory by default it is the current date (i.e. reports-date) ') + help='Set Output report directory by default it is the current date (i.e. reports-date).') + parser.add_argument('--report-prefix', default=None, dest='report_prefix', + help='Set Output report prefix to allow unique files for better baseline comparison.') + parser.add_argument('--report-summary-json', action='store_true', default=None, dest='report_summary_json', + help='Write summary report as JSON file, too.') parser.add_argument('--print-to-screen', default='True', dest='print_to_screen', - help='Set to False if you want to see only non-compliant findings (i.e. False) ') + help='Set to False if you want to see only non-compliant findings (i.e. False).') parser.add_argument('--level', default=2, dest='level', - help='CIS Recommendation Level options are: 1 or 2. Set to 2 by default ') + help='CIS Recommendation Level options are: 1 or 2. Set to 2 by default.') parser.add_argument('--regions', default="", dest='regions', - help='Regions to run the compliance checks on, by default it will run in all regions. Sample input: us-ashburn-1,ca-toronto-1,eu-frankfurt-1') + help='Regions to run the compliance checks on, by default it will run in all regions. Sample input: us-ashburn-1,ca-toronto-1,eu-frankfurt-1.') parser.add_argument('--raw', action='store_true', default=False, - help='Outputs all resource data into CSV files') + help='Outputs all resource data into CSV files.') parser.add_argument('--obp', action='store_true', default=False, - help='Checks for OCI best practices') + help='Checks for OCI best practices.') parser.add_argument('--all-resources', action='store_true', default=False, help='Uses Advanced Search Service to query all resources in the tenancy and outputs to a JSON. 
This also enables OCI Best Practice Checks (--obp) and All resource to csv (--raw) flags.') parser.add_argument('--redact_output', action='store_true', default=False, - help='Redacts OCIDs in output CSV and JSON files') + help='Redacts OCIDs in output CSV and JSON files.') parser.add_argument('-ip', action='store_true', default=False, - dest='is_instance_principals', help='Use Instance Principals for Authentication ') + dest='is_instance_principals', help='Use Instance Principals for Authentication.') parser.add_argument('-dt', action='store_true', default=False, - dest='is_delegation_token', help='Use Delegation Token for Authentication in Cloud Shell') + dest='is_delegation_token', help='Use Delegation Token for Authentication in Cloud Shell.') parser.add_argument('-st', action='store_true', default=False, - dest='is_security_token', help='Authenticate using Security Token') + dest='is_security_token', help='Authenticate using Security Token.') parser.add_argument('-v', action='store_true', default=False, dest='version', help='Show the version of the script and exit.') parser.add_argument('--debug', action='store_true', default=False, - dest='debug', help='Enables debugging messages. This feature is in beta') + dest='debug', help='Enables debugging messages. This feature is in beta.') cmd = parser.parse_args() if cmd.version: @@ -5949,16 +5869,17 @@ def execute_report(): config, signer = create_signer(cmd.file_location, cmd.config_profile, cmd.is_instance_principals, cmd.is_delegation_token, cmd.is_security_token) config['retry_strategy'] = oci.retry.DEFAULT_RETRY_STRATEGY - report = CIS_Report(config, signer, cmd.proxy, cmd.output_bucket, cmd.report_directory, cmd.print_to_screen, \ + report = CIS_Report(config, signer, cmd.proxy, cmd.output_bucket, cmd.report_directory, cmd.report_prefix, cmd.report_summary_json, cmd.print_to_screen, \ cmd.regions, cmd.raw, cmd.obp, cmd.redact_output, debug=cmd.debug, all_resources=cmd.all_resources) csv_report_directory = report.generate_reports(int(cmd.level)) try: if OUTPUT_TO_XLSX: - workbook = Workbook(csv_report_directory + '/Consolidated_Report.xlsx', {'in_memory': True}) - for csvfile in glob.glob(csv_report_directory + '/*.csv'): + report_prefix = f'{cmd.report_prefix}_' if cmd.report_prefix else '' + workbook = Workbook(f'{csv_report_directory}/{report_prefix}Consolidated_Report.xlsx', {'in_memory': True}) + for csvfile in glob.glob(f'{csv_report_directory}/{report_prefix}*.csv'): - worksheet_name = csvfile.split(os.path.sep)[-1].replace(".csv", "").replace("raw_data_", "raw_").replace("Findings", "fds").replace("Best_Practices", "bps") + worksheet_name = csvfile.split(os.path.sep)[-1].replace(report_prefix, "").replace(".csv", "").replace("raw_data_", "raw_").replace("Findings", "fds").replace("Best_Practices", "bps") if "Identity_and_Access_Management" in worksheet_name: worksheet_name = worksheet_name.replace("Identity_and_Access_Management", "IAM") @@ -5986,7 +5907,7 @@ def execute_report(): worksheet.write(r, c, col) workbook.close() except Exception as e: - print("**Failed to output to excel. Please use CSV files.**") + print("** Failed to output to excel. Please use CSV files. 
**") print(e) diff --git a/cd3_automation_toolkit/commonTools.py b/cd3_automation_toolkit/commonTools.py index b300c638b..a0aff38b8 100644 --- a/cd3_automation_toolkit/commonTools.py +++ b/cd3_automation_toolkit/commonTools.py @@ -48,6 +48,7 @@ def __init__(self): self.home_region="" self.ntk_compartment_ids = {} self.region_dict={} + self.region_ad_dict = {} self.protocol_dict={} self.sheet_dict={} self.reg_filter = None @@ -112,6 +113,8 @@ def __init__(self): # Get Export filters def get_export_filters(self,export_filters): for i in export_filters: + i = i.replace(" ", "") + i = i.replace("\"", "") if 'reg_filter' in i: self.reg_filter = (i.split("=")[1])[2:][:-2] @@ -212,6 +215,16 @@ def authenticate(self,auth_mechanism,config_file_path=DEFAULT_LOCATION): return config,signer + #Get Region ADs + def get_region_ad_dict(self, config, signer): + for reg in self.all_regions: + ADs = [] + config.__setitem__("region", self.region_dict[reg]) + idc = IdentityClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) + ADs_data = idc.list_availability_domains(compartment_id=config['tenancy']) + for AD in ADs_data.data: + ADs.append(AD.name) + self.region_ad_dict[reg] = ADs #Get Tenancy Regions def get_subscribedregions(self, config,signer): diff --git a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx index f1ff289f5..049cf621a 100644 Binary files a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx and b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx index a4bb03f2e..fe0529fa7 100644 Binary files a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx and b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx b/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx index 0e0f85a0c..c51453b12 100644 Binary files a/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx and b/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx b/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx index b60fe4d97..3585c0e58 100644 Binary files a/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx and b/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx differ diff --git a/cd3_automation_toolkit/setUpOCI.properties b/cd3_automation_toolkit/setUpOCI.properties index 3015657bf..f5ae7405a 100644 --- a/cd3_automation_toolkit/setUpOCI.properties +++ b/cd3_automation_toolkit/setUpOCI.properties @@ -2,7 +2,7 @@ #Input variables required to run setUpOCI script -#path to output directory where terraform files will be generated. eg /cd3user/tenancies//terraform_files +#path to output directory where terraform files will be generated. eg. 
/cd3user/tenancies//terraform_files outdir= #prefix for output terraform files eg like demo diff --git a/cd3_automation_toolkit/setUpOCI.py b/cd3_automation_toolkit/setUpOCI.py index f693ae338..f4ca48fad 100644 --- a/cd3_automation_toolkit/setUpOCI.py +++ b/cd3_automation_toolkit/setUpOCI.py @@ -135,7 +135,7 @@ def execute_options(options, *args, **kwargs): for option in options: if option.name == "Execute All": continue - if option.name in ['Security Rules', 'Route Rules', 'DRG Route Rules', 'Network Security Groups','Customer Connectivity','CIS Compliance Checking Script'] and devops: + if option.name in ['Security Rules', 'Route Rules', 'DRG Route Rules', 'Network Security Groups','Customer Connectivity'] and devops: with section(option.text): option.callback(*args, **kwargs,sub_options=sub_child_options) else: @@ -188,7 +188,13 @@ def update_path_list(regions_path=[],service_dirs=[]): for current_dir in service_dirs: for reg in regions_path: path_value = ((outdir + "/" + reg + "/" + current_dir).rstrip('/')).replace("//","/") - items = glob.glob(path_value + "/*") + all_items = glob.glob(path_value + "/*") + items = [] + for f in all_items: + actual_file = f.split("/")[-1] + if actual_file.startswith("variables") or actual_file.endswith(".tf_backup"): + continue + items.append(f) files = [f for f in items if (os.path.isfile(f) and (datetime.datetime.fromtimestamp(os.path.getmtime(f)) >= exec_start_time))] if files: @@ -249,10 +255,19 @@ def fetch_compartments(outdir, outdir_struct, ct): print("Continuing") compocidsStr = '' + compartments_file_data = "" + comp_done = [] for k,v in ct.ntk_compartment_ids.items(): + if v not in comp_done: + compartments_file_data += k + "\n" + comp_done.append(v) k = commonTools.check_tf_variable(k) v = "\"" + v + "\"" compocidsStr = "\t" + k + " = " + v + "\n" + compocidsStr + + f = open(outdir + "/../.config_files/compartments_file", "w+") + f.write(compartments_file_data[:-1]) + f.close() compocidsStr = "\n" + compocidsStr finalCompStr = "#START_compartment_ocids#" + compocidsStr + "\t#compartment_ocids_END#" for k, v in var_data.items(): @@ -266,7 +281,7 @@ def fetch_compartments(outdir, outdir_struct, ct): f.write(var_data[k]) print("\nCompartment info written to all variables files under outdir...\n") # update fetchcompinfo.safe - fetch_comp_file = f'{outdir}/fetchcompinfo.safe' + fetch_comp_file = f'{outdir}/.safe/fetchcompinfo.safe' with open(fetch_comp_file, 'w+') as f: f.write('run_fetch_script=0') f.close() @@ -840,7 +855,7 @@ def create_network(execute_all=False,prim_options=[]): # Update modified path list regions_path = export_regions.copy() regions_path.append("global") - service_dirs = [service_dir_network, service_dir_nsg, service_dir_vlan, 'rpc'] + service_dirs = [service_dir_network,service_dir_nsg, service_dir_vlan, 'rpc'] update_path_list(regions_path=regions_path, service_dirs=service_dirs) def modify_terraform_network(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy): @@ -1194,8 +1209,7 @@ def create_cis_oss_logs(inputfile, outdir, prefix, ct): def create_cis_features(prim_options=[]): - options = [Option('CIS Compliance Checking Script', initiate_cis_scan, 'CIS Compliance Checking'), - Option("Create Key/Vault", create_cis_keyvault, 'Creating CIS Key/Vault and enable Logging for write events to bucket'), + options = [Option("Create Key/Vault", create_cis_keyvault, 'Creating CIS Key/Vault and enable Logging for write events to bucket'), Option("Create Default Budget",create_cis_budget,'Create Default Budget'), 
Option("Enable Cloud Guard", enable_cis_cloudguard, 'Enable Cloud Guard'),] @@ -1205,6 +1219,18 @@ def create_cis_features(prim_options=[]): options = show_options(options, quit=True, menu=True, index=1) execute_options(options, outdir, prefix, config_file_path) +def run_utility(prim_options=[]): + options = [Option('CIS Compliance Check Script', initiate_cis_scan, 'CIS Compliance Check Script'), + Option('ShowOCI Report', run_showoci, 'ShowOCI Report') + ] + if prim_options: + options = match_options(options, prim_options) + execute_options(options, outdir, prefix, config_file_path,sub_options=sub_child_options) + + else: + options = show_options(options, quit=True, menu=True, index=1) + execute_options(options, outdir, prefix, config_file_path) + def create_cis_keyvault(*args,**kwargs): if not devops: region_name = input("Enter region name eg ashburn where you want to create Key/Vault: ") @@ -1236,6 +1262,7 @@ def enable_cis_cloudguard(*args,**kwargs): region = input("Enter Reporting Region for Cloud Guard eg london: ") else: region = ct.cg_region + region = region.lower() options = [Option(None, Security.enable_cis_cloudguard, 'Enabling Cloud Guard')] execute_options(options, outdir, service_dir_cloud_guard, prefix, ct, region) # Update modified path list @@ -1244,8 +1271,8 @@ def enable_cis_cloudguard(*args,**kwargs): def initiate_cis_scan(outdir, prefix, config_file,sub_options=[]): options = [ - Option("Download latest compliance checking script", start_cis_download, 'Download CIS script'), - Option("Execute compliance checking script", start_cis_scan, 'Execute CIS script'), + Option("Download latest compliance checking script", start_cis_download, 'Downloading CIS Script'), + Option("Execute compliance checking script", start_cis_scan, 'Executing CIS Script'), ] if sub_options: options = match_options(options, sub_options) @@ -1263,12 +1290,17 @@ def start_cis_download(outdir, prefix, config_file): def start_cis_scan(outdir, prefix, config_file): cmd = "python cis_reports.py" + if auth_mechanism == "instance_principal": + cmd += " -ip" + elif auth_mechanism == "session_token": + cmd += " -st" + else: + cmd += " -c "+config_file if not devops: user_input = input("Enter command to execute the script. 
Press Enter to execute {} : ".format(cmd)) if user_input!='': cmd = "{}".format(user_input) split = str.split(cmd) - dirname = prefix + "_cis_report" resource = "cis_report" out_rep = outdir + '/'+ dirname @@ -1280,13 +1312,67 @@ def start_cis_scan(outdir, prefix, config_file): else: commonTools.backup_file(outdir, resource, out_rep) - out = ["-c", config_file, '--report-directory', out_rep] - cmd = cmd +" "+ out[0] + " "+out[1] + " "+ out[2] + " " +out[3] + out = ['--report-directory', out_rep] + cmd = cmd +" "+ out[0] + " "+out[1] split.extend(out) print("Executing: "+cmd) print("Scan started!") execute(split, config_file) +def get_latest_showoci(outdir, prefix,config_file): + print("Getting latest showoci report script") + if (os.path.isdir("/tmp/oci-python-sdk")): + shutil.rmtree("/tmp/oci-python-sdk") + cmd = "git clone https://github.com/oracle/oci-python-sdk /tmp/oci-python-sdk" + split = str.split(cmd) + execute(split,config_file) + if (os.path.isdir("/cd3user/oci_tools/oci-python-sdk")): + shutil.rmtree("/cd3user/oci_tools/oci-python-sdk") + shutil.move("/tmp/oci-python-sdk", "/cd3user/oci_tools/oci-python-sdk") + print("Download complete!!") + + +def execute_showoci(outdir, prefix, config_file_path): + if not os.path.isfile("/cd3user/oci_tools/oci-python-sdk/examples/showoci/showoci.py"): + get_latest_showoci(outdir, prefix, config_file=config_file_path) + cmd = "python /cd3user/oci_tools/oci-python-sdk/examples/showoci/showoci.py -a" + if auth_mechanism == "instance_principal": + cmd += " -ip" + elif auth_mechanism == "session_token": + cmd += " -is" + else: + cmd += " -cf "+config_file_path + split = str.split(cmd) + dirname = prefix + "_showoci_report" + resource = "showoci_report" + out_rep = outdir + '/' + dirname + # config = "--config "+ config + commonTools.backup_file(outdir, resource, dirname) + + if not os.path.exists(out_rep): + os.makedirs(out_rep) + else: + commonTools.backup_file(outdir, resource, out_rep) + out_file = out_rep+"/"+prefix + out = ['-csv', out_file] + cmd = cmd + " " + out[0] + " " + out[1] + split.extend(out) + print("Executing: " + cmd) + execute(split, config_file_path) + + + +def run_showoci(outdir, prefix, config_file,sub_options=[]): + options = [ + Option("Download Latest ShowOCI Script", get_latest_showoci, 'Downloading ShowOCI Script'), + Option("Execute ShowOCI Script", execute_showoci, 'Executing ShowOCI Script'), + ] + if sub_options: + options = match_options(options, sub_options) + else: + options = show_options(options, quit=True, menu=True, index=1) + execute_options(options, outdir, prefix, config_file) + def execute(command,config_file): export_cmd_windows = "set OCI_CONFIG_HOME="+config_file export_cmd_linux = "export OCI_CONFIG_HOME=" + config_file @@ -1543,6 +1629,9 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): subscribed_regions = ct.get_subscribedregions(config,signer) home_region = ct.home_region +## Fetch Region ADs +ct.get_region_ad_dict(config,signer) + # Set service directories as per outdir_structure file # If single outdir, get service names from /cd3user/oci_tools/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties if len(outdir_struct.items())==0: @@ -1571,7 +1660,7 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): fetchcompinfo_data = "run_fetch_script=0" try: # read fetchcompinfo.safe - fetch_comp_file = f'{outdir}/fetchcompinfo.safe' + fetch_comp_file = f'{outdir}/.safe/fetchcompinfo.safe' with open(fetch_comp_file, 'r') as f: 
fetchcompinfo_data = f.read() f.close() @@ -1634,7 +1723,8 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): Option('Logging Services', create_logging, 'Logging Services'), Option('Software-Defined Data Centers - OCVS', create_sddc, 'Processing SDDC Tabs'), Option('CIS Compliance Features', create_cis_features, 'CIS Compliance Features'), - Option('CD3 Services', cd3_services, 'CD3 Services') + Option('CD3 Services', cd3_services, 'CD3 Services'), + Option('3rd Party Services', run_utility,'3rd Party Services') ] export_regions = ct.all_regions @@ -1661,12 +1751,12 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): if menu: break # write updated paths to a file -updated_paths_file = f'{outdir}/updated_paths.safe' +updated_paths_file = f'{outdir}/.safe/updated_paths.safe' with open(updated_paths_file, 'w+') as f: for item in updated_paths: f.write(str(item).replace('//', '/') + "\n") f.close() -import_scripts_file = f'{outdir}/import_scripts.safe' +import_scripts_file = f'{outdir}/.safe/import_scripts.safe' with open(import_scripts_file, 'w+') as f: for item in import_scripts: f.write(str(item).replace('//', '/') + "\n") diff --git a/cd3_automation_toolkit/setUpOCI_jenkins.py b/cd3_automation_toolkit/setUpOCI_jenkins.py deleted file mode 100644 index 55cc36fd7..000000000 --- a/cd3_automation_toolkit/setUpOCI_jenkins.py +++ /dev/null @@ -1,1172 +0,0 @@ -import argparse -import configparser -import Database -import Identity -import Compute -import ManagementServices -import DeveloperServices -import Security -import cd3Validator -import Storage -import Network -import SDDC -import Governance -from commonTools import * -from collections import namedtuple -import requests -import subprocess -import datetime,glob,os - - -def show_options(options, quit=False, menu=False, extra=None, index=0): - # Just add whitespace between number and option. It just makes it look better - number_offset = len(str(len(options))) + 1 - # Iterate over options. Print number and option - for i, option in enumerate(options, index): - print(f'{str(i)+".":<{number_offset}} {option.name}') - if quit: - print(f'{"q"+".":<{number_offset}} Press q to quit') - if menu: - print(f'{"m"+".":<{number_offset}} Press m to go back to Main Menu') - if extra: - print(extra) - user_input = input('Enter your choice (specify comma separated to choose multiple choices): ') - user_input = user_input.split(',') - if 'q' in user_input or 'm' in user_input: - return user_input - # Subtract one to account for zero-indexing. 
The options start at 1 - # #return [options[int(choice)-1] for choice in user_input] - try: - return [options[int(choice)-index] for choice in user_input] - except IndexError as ie: - print("\nInvalid Option.....Exiting!!") - exit(1) - except ValueError as ie: - print("\nInvalid Input.....Try again!!\n") - options = show_options(inputs, quit=True, index=index) - return options - - -def execute_options(options, *args, **kwargs): - global menu, quit - if 'm' in options or 'q' in options: - menu = 'm' in options - quit = 'q' in options - else: - for option in options: - with section(option.text): - option.callback(*args, **kwargs) - -def get_region_list(rm): - if rm == False: - input_region_names = ct.reg_filter - else: - input_region_names = ct.orm_reg_filter - input_region_names = list(map(lambda x: x.strip(), input_region_names.split(','))) if input_region_names else None - remove_regions = [] - region_list_fetch = [] - #validate input regions - if (input_region_names is not None): - for x in range(0, len(input_region_names)): - if (input_region_names[x].lower() not in ct.all_regions and input_region_names[x].lower()!='global'): - print("Input region: " + input_region_names[x] + " is not subscribed to OCI Tenancy") - remove_regions.append(input_region_names[x]) - - input_region_names = [x.lower() for x in input_region_names if x not in remove_regions] - if (len(input_region_names) == 0): - print("None of the input regions specified are subscribed to OCI..Exiting!!!") - exit(1) - else: - print("\nFetching for Regions... " + str(input_region_names)) - region_list_fetch = input_region_names - else: - print("Fetching for all Regions OCI tenancy is subscribed to...") - region_list_fetch = ct.all_regions - # include global dir for RM stack upload - if rm == True: - region_list_fetch.append('global') - return region_list_fetch - -def update_path_list(regions_path=[],service_dirs=[]): - # Update modified path list - for current_dir in service_dirs: - for reg in regions_path: - path_value = ((outdir + "/" + reg + "/" + current_dir).rstrip('/')).replace("//","/") - items = glob.glob(path_value + "/*") - files = [f for f in items if - (os.path.isfile(f) and (datetime.datetime.fromtimestamp(os.path.getmtime(f)) >= exec_start_time))] - if files: - if path_value not in updated_paths: - updated_paths.append(path_value) - for script_file in files: - if script_file.endswith(".sh") and script_file not in import_scripts: - import_scripts.append(script_file) - -def fetch_compartments(outdir, outdir_struct, ct): - var_files={} - var_data = {} - home_region = ct.home_region - print("outdir specified should contain region directories and then variables_.tf file inside the region directories eg /cd3user/tenancies//terraform_files") - print("Verifying out directory and Taking backup of existing variables files...Please wait...") - print("\nFetching Compartment Info...Please wait...") - ct.get_network_compartment_ids(config['tenancy'], "root", config, signer) - ct.all_regions.append('global') - print("\nWriting to variables files...") - home_region_services = ['identity', 'tagging', 'budget'] - for region in ct.all_regions: - # for global directory - if region == 'global': - file = f'{outdir}/{region}/rpc/variables_{region}.tf' - var_files[region] = file - try: - # Read variables file data - with open(file, 'r') as f: - var_data[region] = f.read() - except FileNotFoundError as e: - print(f'\nVariables file not found in - {region}.......') - print("Continuing") - - # Fetch variables file inside region directories - 
single outdir - elif len(outdir_struct) == 0: - file = f'{outdir}/{region}/variables_{region}.tf' - var_files[region]=file - try: - # Read variables file data - with open(file, 'r') as f: - var_data[region] = f.read() - except FileNotFoundError as e: - print(f'\nVariables file not found in - {region}.......') - print("Continuing") - - # Fetch variables file inside service directories - separate outdir - else: - for k, v in outdir_struct.items(): - if ((k not in home_region_services) or ((k in home_region_services) and region == home_region)) and v != '': - file = f'{outdir}/{region}/{v}/variables_{region}.tf' - var_files[region + "-" + v] = file - try: - # Read variables file data - with open(file, 'r') as f: - var_data[region + "-" + v] = f.read() - except FileNotFoundError as e: - print(f'\nVariables file not found in - {region}/{v}/.......') - print("Continuing") - - compocidsStr = '' - for k,v in ct.ntk_compartment_ids.items(): - k = commonTools.check_tf_variable(k) - v = "\"" + v + "\"" - compocidsStr = "\t" + k + " = " + v + "\n" + compocidsStr - compocidsStr = "\n" + compocidsStr - finalCompStr = "#START_compartment_ocids#" + compocidsStr + "\t#compartment_ocids_END#" - for k, v in var_data.items(): - var_data[k] = re.sub('#START_compartment_ocids#.*?#compartment_ocids_END#', finalCompStr, - var_data[k], flags=re.DOTALL) - # Write variables file data - with open(var_files[k], "w") as f: - # Backup the existing Routes tf file - file = var_files[k] - shutil.copy(file, file + "_backup") - f.write(var_data[k]) - print("\nCompartment info written to all variables files under outdir...\n") - # update fetchcompinfo.safe - fetch_comp_file = f'{outdir}/fetchcompinfo.safe' - with open(fetch_comp_file, 'w+') as f: - f.write('run_fetch_script=0') - f.close() - ct.all_regions = ct.all_regions[:-1] - -################## Validate Function ######################### -def validate_cd3(options=[]): - choices = [] - choice_items = [] - for opt in options: - choice_items = [] - if opt in ['Validate Compartments','Validate Groups','Validate Policies','Validate Tags','Validate Networks','Validate DNS','Validate Instances','Validate Block Volumes','Validate FSS','Validate Buckets']: - if opt == "Validate Networks": - opt = "Validate Network(VCNs, SubnetsVLANs, DHCP, DRGs)" - choice_items.append(opt) - choices.append(choice_items) - cd3Validator.validate_cd3(choices,inputfile, var_file, prefix, outdir, ct) # config, signer, ct) - print("Exiting CD3 Validation...") - -################## Export Identity ########################## -def export_identityOptions(options=[]): - service_dirs = [] - for opt in options: - if opt == "Export Compartments/Groups/Policies": - export_compartmentPoliciesGroups(inputfile, outdir, service_dir_identity, config,signer, ct) - service_dirs = [service_dir_identity] - elif opt == "Export Users": - export_users(inputfile, outdir, service_dir_identity, config,signer, ct) - service_dirs = [service_dir_identity] - elif opt == "Export Network Sources": - export_networkSources(inputfile, outdir, service_dir_identity, config,signer, ct) - service_dirs = [service_dir_identity] - # Update modified path list - update_path_list(regions_path=[ct.home_region], service_dirs=service_dirs) - - -def export_compartmentPoliciesGroups(inputfile, outdir, service_dir, config, signer, ct): - compartments = ct.get_compartment_map(var_file, 'Identity Objects') - Identity.export_identity(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments) - 
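fetch_compartments rewrites each variables_<region>.tf in place: whatever sits between the #START_compartment_ocids# and #compartment_ocids_END# markers is replaced with the freshly fetched compartment name/OCID pairs, using the same re.sub/DOTALL substitution shown here. A self-contained illustration of that marker substitution, with made-up compartment names and OCIDs:

import re

var_data = (
    '#START_compartment_ocids#\n'
    '\tstale = "ocid1.compartment.oc1..aaaa"\n'
    '\t#compartment_ocids_END#\n'
)
# Rebuild the block from freshly fetched name -> OCID pairs (sample values)
comp_ocids = {"network": "ocid1.compartment.oc1..bbbb", "security": "ocid1.compartment.oc1..cccc"}
comp_block = ""
for name, ocid in comp_ocids.items():
    comp_block = "\t" + name + " = " + '"' + ocid + '"' + "\n" + comp_block
final_block = "#START_compartment_ocids#\n" + comp_block + "\t#compartment_ocids_END#"

var_data = re.sub('#START_compartment_ocids#.*?#compartment_ocids_END#', final_block, var_data, flags=re.DOTALL)
print(var_data)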
create_identity(options=['Add/Modify/Delete Compartments','Add/Modify/Delete Groups','Add/Modify/Delete Policies']) - print("\n\nExecute tf_import_commands_identity_nonGF.sh script created under home region directory to synch TF with OCI Identity objects\n") - - -def export_users(inputfile, outdir, service_dir, config,signer, ct): - Identity.Users.export_users(inputfile, outdir, service_dir, config, signer, ct) - create_identity(options=['Add/Modify/Delete Users']) - print("\n\nExecute tf_import_commands_users_nonGF.sh script created under home region directory to synch TF with OCI Identity objects\n") - - -def export_networkSources(inputfile, outdir, service_dir, config, signer, ct): - compartments = ct.get_compartment_map(var_file, 'Identity Objects') - Identity.NetworkSources.export_networkSources(inputfile, outdir, service_dir, config, signer, ct) - create_identity(options=['Add/Modify/Delete Network Sources']) - print("\n\nExecute tf_import_commands_networkSources_nonGF.sh script created under home region directory to synch TF with OCI Identity objects\n") - - -def export_tags(options=[]): - compartments = ct.get_compartment_map(var_file, 'Tagging Objects') - Governance.export_tags_nongreenfield(inputfile, outdir, service_dir_tagging, config, signer, ct, export_compartments=compartments) - create_tags() - print("\n\nExecute tf_import_commands_tags_nonGF.sh script created under home region directory to synch TF with OCI Tags\n") - # Update modified path list - update_path_list(regions_path=[ct.home_region], service_dirs=[service_dir_tagging]) - - -def export_network(options=[]): - service_dirs = [] - for opt in options: - if opt == "Export all Network Components": - export_networking(inputfile, outdir, outdir_struct, config, signer, ct, export_regions) - service_dirs = [service_dir_network, service_dir_nsg, service_dir_vlan] - if opt == "Export Network components for VCNs/DRGs/DRGRouteRulesinOCI Tabs": - export_major_objects(inputfile, outdir, service_dir_network, config, signer, ct, export_regions) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == "Export Network components for DHCP Tab": - export_dhcp(inputfile, outdir, service_dir_network, config, signer, ct, export_regions) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == "Export Network components for SecRulesinOCI Tab": - export_secrules(inputfile, outdir, service_dir_network, config, signer, ct, export_regions) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == "Export Network components for RouteRulesinOCI Tab": - export_routerules(inputfile, outdir, service_dir_network, config, signer, ct, export_regions) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == "Export Network components for SubnetsVLANs Tab": - export_subnets_vlans(inputfile, outdir, outdir_struct, config, signer, ct, export_regions) - service_dirs.append(service_dir_vlan) if service_dir_vlan not in service_dirs else service_dirs - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == "Export Network components for NSGs Tab": - export_nsg(inputfile, outdir, service_dir_nsg, config, signer, ct, export_regions) - service_dirs.append(service_dir_nsg) if service_dir_nsg not in service_dirs else service_dirs - - 
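The export helpers here all funnel their touched directories through update_path_list, which keeps only files modified after exec_start_time and records any generated *.sh import scripts. The core of that filter, reduced to a standalone helper that assumes the toolkit's <outdir>/<region>/<service_dir> layout:

import datetime, glob, os

exec_start_time = datetime.datetime.now()  # captured once, before any export runs

def changed_files(outdir, region, service_dir):
    # Files under <outdir>/<region>/<service_dir> touched after exec_start_time
    base = ((outdir + "/" + region + "/" + service_dir).rstrip('/')).replace("//", "/")
    return [f for f in glob.glob(base + "/*")
            if os.path.isfile(f)
            and datetime.datetime.fromtimestamp(os.path.getmtime(f)) >= exec_start_time]

# updated_paths records the directory itself; import_scripts collects any new *.sh files found in it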
print("=====================================================================================================================") - print("NOTE: Make sure to execute tf_import_commands_network_major-objects_nonGF.sh before executing the other scripts.") - print("=====================================================================================================================") - - # Update modified path list - regions_path = export_regions.copy() - regions_path.append("global") - service_dirs.append("rpc") - update_path_list(regions_path=regions_path, service_dirs=service_dirs) - - -def export_networking(inputfile, outdir, service_dir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Network Objects') - Network.export_networking(inputfile, outdir, service_dir,config, signer, ct, export_compartments=compartments, export_regions=export_regions) - Network.create_major_objects(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) - Network.create_rpc_resource(inputfile, outdir, service_dir_network, prefix, auth_mechanism, config_file_path, ct, non_gf_tenancy) - Network.create_terraform_dhcp_options(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) - Network.modify_terraform_secrules(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) - Network.modify_terraform_routerules(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) - Network.modify_terraform_drg_routerules(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) - Network.create_terraform_drg_route(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy, - network_connectivity_in_setupoci='', modify_network=False) - Network.create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy, network_vlan_in_setupoci='network') - Network.create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy, network_vlan_in_setupoci='vlan') - Network.create_terraform_nsg(inputfile, outdir, service_dir_nsg, prefix, ct) - - print("\n\nExecute tf_import_commands_network_*_nonGF.sh script created under each region directory to synch TF with OCI Network objects\n") - - -def export_major_objects(inputfile, outdir, service_dir_network, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'VCN Major Objects') - Network.export_major_objects(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions) - Network.export_drg_routetable(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) - Network.create_major_objects(inputfile, outdir,service_dir_network, prefix, ct, non_gf_tenancy) - Network.create_rpc_resource(inputfile, outdir, service_dir_network, prefix, auth_mechanism, config_file_path, ct, non_gf_tenancy) - Network.create_terraform_drg_route(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy,network_connectivity_in_setupoci='', modify_network=False) - print("\n\nExecute tf_import_commands_network_major-objects_nonGF.sh and tf_import_commands_network_drg_routerules_nonGF.sh scripts created under each region directory to synch TF with OCI Network objects\n") - - -def export_dhcp(inputfile, outdir, service_dir_network,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'DHCP') - Network.export_dhcp(inputfile, outdir, service_dir_network,config, signer, ct, 
export_compartments=compartments, export_regions=export_regions) - Network.create_terraform_dhcp_options(inputfile, outdir, service_dir_network,prefix, ct, non_gf_tenancy, ct) - print("\n\nExecute tf_import_commands_network_dhcp_nonGF.sh script created under each region directory to synch TF with OCI Network objects\n") - - -def export_secrules(inputfile, outdir, service_dir_network,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'SecRulesInOCI') - Network.export_seclist(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) - Network.modify_terraform_secrules(inputfile, outdir,service_dir_network, prefix, ct, non_gf_tenancy) - print("\n\nExecute tf_import_commands_network_secrules_nonGF.sh script created under each region directory to synch TF with OCI Network objects\n") - - -def export_routerules(inputfile, outdir, service_dir_network,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'RouteRulesInOCI') - Network.export_routetable(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) - Network.modify_terraform_routerules(inputfile, outdir, service_dir_network,prefix, ct, non_gf_tenancy) - print("\n\nExecute tf_import_commands_network_routerules_nonGF.sh script created under each region directory to synch TF with OCI Network objects\n") - - -def export_subnets_vlans(inputfile, outdir, service_dir,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'Subnets') - Network.export_subnets_vlans(inputfile, outdir, service_dir,config, signer, ct, export_compartments=compartments, export_regions=export_regions) - Network.create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy, network_vlan_in_setupoci='network') - Network.create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy, network_vlan_in_setupoci='vlan') - print("\n\nExecute tf_import_commands_network_subnets_nonGF.sh script created under each region directory to synch TF with OCI Network objects") - print("\nExecute tf_import_commands_network_vlans_nonGF.sh script created under each region directory to synch TF with OCI Network objects\n") - - -def export_nsg(inputfile, outdir, service_dir_nsg,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'NSGs') - Network.export_nsg(inputfile, outdir,service_dir_nsg, config,signer,ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) - Network.create_terraform_nsg(inputfile, outdir, service_dir_nsg,prefix, ct) - print("\n\nExecute tf_import_commands_network_nsg_nonGF.sh script created under each region directory to synch TF with OCI Network objects\n") - - -def export_compute(options=[]): - for opt in options: - if opt == "Export Dedicated VM Hosts": - export_dedicatedvmhosts(inputfile, outdir, config, signer, ct, export_regions) - if opt == "Export Instances (excludes instances launched by OKE)": - export_instances(inputfile, outdir, config, signer, ct, export_regions) - - -def export_dedicatedvmhosts(inputfile, outdir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Dedicated VM Hosts') - Compute.export_dedicatedvmhosts(inputfile, outdir, service_dir_dedicated_vm_host, config, signer, ct, export_compartments=compartments, export_regions=export_regions) - 
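Each export path above ends by pointing at a generated tf_import_commands_*_nonGF.sh script that must be run per region directory to bring the Terraform state in line with OCI; the toolkit leaves that step to the user. A hypothetical convenience loop (not part of the toolkit) that walks the region directories and runs whichever import scripts it finds might look like:

import glob
import os
import subprocess

def run_import_scripts(outdir, regions, pattern="tf_import_commands_*_nonGF.sh"):
    # Hypothetical helper: execute each generated import script, run from the directory that contains it
    for region in regions:
        for script in sorted(glob.glob(f"{outdir}/{region}/**/{pattern}", recursive=True)):
            print("Running " + script)
            subprocess.run(["bash", os.path.basename(script)], check=True, cwd=os.path.dirname(script))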
create_dedicatedvmhosts(inputfile, outdir, service_dir_dedicated_vm_host, prefix, ct) - print("\n\nExecute tf_import_commands_dedicatedvmhosts_nonGF.sh script created under each region directory to synch TF with OCI Dedicated VM Hosts\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_dedicated_vm_host]) - - -def export_instances(inputfile, outdir,config,signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Instances') - display_name_str = ct.ins_pattern_filter if ct.ins_pattern_filter else None - ad_name_str = ct.ins_ad_filter if ct.ins_ad_filter else None - display_names = list(map(lambda x: x.strip(), display_name_str.split(','))) if display_name_str else None - ad_names = list(map(lambda x: x.strip(), ad_name_str.split(','))) if ad_name_str else None - Compute.export_instances(inputfile, outdir, service_dir_instance,config,signer,ct, export_compartments=compartments, export_regions=export_regions, display_names = display_names, ad_names = ad_names) - create_instances(inputfile, outdir, service_dir_instance,prefix, ct) - print("\n\nExecute tf_import_commands_instances_nonGF.sh script created under each region directory to synch TF with OCI Instances\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_instance]) - - -def export_storage(options=[]): - for opt in options: - if opt == "Export Block Volumes/Block Backup Policy": - export_block_volumes(inputfile, outdir, config, signer, ct, export_regions) - if opt == "Export File Systems": - export_fss(inputfile, outdir, config, signer, ct, export_regions) - if opt == "Export Object Storage Buckets": - export_buckets(inputfile, outdir, config, signer, ct, export_regions) - - -def export_block_volumes(inputfile, outdir,config,signer,ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Block Volumes') - display_name_str = ct.bv_pattern_filter if ct.bv_pattern_filter else None - ad_name_str = ct.bv_ad_filter if ct.bv_ad_filter else None - display_names = list(map(lambda x: x.strip(), display_name_str.split(','))) if display_name_str else None - ad_names = list(map(lambda x: x.strip(), ad_name_str.split(','))) if ad_name_str else None - Storage.export_blockvolumes(inputfile, outdir, service_dir_block_volume, config,signer,ct, export_compartments=compartments, export_regions=export_regions, display_names = display_names, ad_names = ad_names) - Storage.create_terraform_block_volumes(inputfile, outdir, service_dir_block_volume, prefix, ct) - print("\n\nExecute tf_import_commands_blockvolumes_nonGF.sh script created under each region directory to synch TF with OCI Block Volume Objects\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_block_volume]) - - -def export_fss(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'FSS objects') - Storage.export_fss(inputfile, outdir, service_dir_fss, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - Storage.create_terraform_fss(inputfile, outdir, service_dir_fss, prefix, ct) - print("\n\nExecute tf_import_commands_fss_nonGF.sh script created under each region directory to synch TF with OCI FSS objects\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_fss]) - - -def export_buckets(inputfile, outdir, config, signer, ct, export_regions): - compartments = 
ct.get_compartment_map(var_file, 'Buckets') - Storage.export_buckets(inputfile, outdir, service_dir_object_storage, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - Storage.create_terraform_oss(inputfile, outdir, service_dir_object_storage, prefix, ct) - print("\n\nExecute tf_import_commands_buckets_nonGF.sh script created under each region directory to synch TF with OCI Object Storage Buckets\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_object_storage]) - - -def export_loadbalancer(options=[]): - for opt in options: - if opt == "Export Load Balancers": - export_lbr(inputfile, outdir, config, signer, ct, export_regions) - if opt == "Export Network Load Balancers": - export_nlb(inputfile, outdir, config, signer, ct, export_regions) - - -def export_lbr(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'LBR objects') - Network.export_lbr(inputfile, outdir, service_dir_loadbalancer, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - create_lb(inputfile, outdir,service_dir_loadbalancer, prefix, ct) - print("\n\nExecute tf_import_commands_lbr_nonGF.sh script created under each region directory to synch TF with OCI LBR objects\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_loadbalancer]) - - -def export_nlb(inputfile, outdir,config,signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'NLB objects') - Network.export_nlb(inputfile, outdir, service_dir_networkloadbalancer, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - create_nlb(inputfile, outdir,service_dir_networkloadbalancer, prefix, ct) - print("\n\nExecute tf_import_commands_nlb_nonGF.sh script created under each region directory to synch TF with OCI NLB objects\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_networkloadbalancer]) - - -def export_databases(options=[]): - for opt in options: - if opt == "Export Virtual Machine or Bare Metal DB Systems": - export_dbsystems_vm_bm(inputfile, outdir, config, signer, ct, export_regions) - if opt == "Export EXA Infra and EXA VMClusters": - export_exa_infra_vmclusters(inputfile, outdir, config, signer, ct, export_regions) - if opt == 'Export ADBs': - export_adbs(inputfile, outdir, config, signer, ct, export_regions) - - -def export_dbsystems_vm_bm(inputfile, outdir,config,signer, ct,export_regions): - compartments = ct.get_compartment_map(var_file,'VM and BM DB Systems') - Database.export_dbsystems_vm_bm(inputfile, outdir, service_dir_dbsystem_vm_bm, config,signer,ct, export_compartments=compartments, export_regions= export_regions) - Database.create_terraform_dbsystems_vm_bm(inputfile, outdir, service_dir_dbsystem_vm_bm, prefix, ct) - print("\n\nExecute tf_import_commands_dbsystems-vm-bm_nonGF.sh script created under each region directory to synch TF with DBSystems\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_dbsystem_vm_bm]) - - -def export_exa_infra_vmclusters(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'EXA Infra and EXA VMClusters') - Database.export_exa_infra(inputfile, outdir, service_dir_database_exacs, config,signer,ct, export_compartments=compartments, export_regions= export_regions) - 
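The instance and block-volume exports above accept optional comma-separated display-name and AD filters (ct.ins_pattern_filter, ct.bv_ad_filter, and so on) and normalise them into lists, with None meaning "no filter". The same parsing, written out on its own:

def parse_csv_filter(raw):
    # "web*, app-01" -> ["web*", "app-01"]; empty string or None -> None (no filtering)
    return [part.strip() for part in raw.split(",")] if raw else None

display_names = parse_csv_filter("web*, app-01")   # ["web*", "app-01"]
ad_names = parse_csv_filter(None)                  # None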
Database.export_exa_vmclusters(inputfile, outdir, service_dir_database_exacs, config,signer,ct, export_compartments=compartments, export_regions= export_regions) - create_exa_infra_vmclusters(inputfile, outdir, service_dir_database_exacs, prefix,ct) - print("\n\nExecute tf_import_commands_exa-infra_nonGF.sh and tf_import_commands_exa-vmclusters_nonGF.sh scripts created under each region directory to synch TF with Exa-Infra and Exa-VMClusters\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_database_exacs]) - - -def export_adbs(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'ADBs') - Database.export_adbs(inputfile, outdir, service_dir_adb, config,signer,ct, export_compartments=compartments, export_regions= export_regions) - Database.create_terraform_adb(inputfile, outdir, service_dir_adb, prefix, ct) - print("\n\nExecute tf_import_commands_adb_nonGF.sh script created under each region directory to synch TF with OCI ADBs\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_adb]) - - -def export_management_services(options=[]): - service_dirs = [] - for opt in options: - if opt == "Export Notifications": - export_notifications(inputfile, outdir, service_dir_managementservices, config, signer, ct, export_regions) - service_dirs = [service_dir_managementservices] - if opt == "Export Events": - export_events(inputfile, outdir, service_dir_managementservices, config, signer, ct, export_regions) - service_dirs = [service_dir_managementservices] - if opt == "Export Alarms": - export_alarms(inputfile, outdir, service_dir_managementservices, config, signer, ct, export_regions) - service_dirs = [service_dir_managementservices] - if opt == "Export Service Connectors": - export_service_connectors(inputfile, outdir, service_dir_managementservices, config, signer, ct, export_regions) - service_dirs = [service_dir_managementservices] - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_managementservices]) - - -def export_notifications(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Notifications') - ManagementServices.export_notifications(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - ManagementServices.create_terraform_notifications(inputfile, outdir, service_dir, prefix, ct) - print("\n\nExecute tf_import_commands_notifications_nonGF.sh script created under each region directory to synch TF with OCI Notifications\n") - - -def export_events(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Events') - ManagementServices.export_events(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - ManagementServices.create_terraform_events(inputfile, outdir, service_dir, prefix, ct) - print("\n\nExecute tf_import_commands_events_nonGF.sh script created under each region directory to synch TF with OCI Events\n") - - -def export_alarms(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Alarms') - ManagementServices.export_alarms(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - 
ManagementServices.create_terraform_alarms(inputfile, outdir,service_dir, prefix, ct) - print("\n\nExecute tf_import_commands_alarms_nonGF.sh script created under each region directory to synch TF with OCI Alarms\n") - - -def export_service_connectors(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Service Connectors') - ManagementServices.export_service_connectors(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) - ManagementServices.create_service_connectors(inputfile, outdir, service_dir, prefix, ct) - print("\n\nExecute tf_import_commands_serviceconnectors_nonGF.sh script created under each region directory to synch TF with OCI Service Connectors\n") - - -def export_developer_services(options=[]): - for opt in options: - if opt == "Export OKE cluster and Nodepools": - export_oke(inputfile, outdir, config, signer, ct, export_regions) - - -def export_oke(inputfile, outdir, config,signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'OKE') - DeveloperServices.export_oke(inputfile, outdir, service_dir_oke,config,signer,ct, export_compartments=compartments, export_regions=export_regions) - DeveloperServices.create_terraform_oke(inputfile, outdir, service_dir_oke,prefix, ct) - print("\n\nExecute tf_import_commands_oke_nonGF.sh script created under each region directory to synch TF with OKE\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_oke]) - - -def export_sddc(): - compartments = ct.get_compartment_map(var_file,'SDDCs') - SDDC.export_sddc(inputfile, outdir, service_dir_sddc,config,signer,ct, export_compartments=compartments, export_regions=export_regions) - SDDC.create_terraform_sddc(inputfile, outdir, service_dir_sddc, prefix, ct) - print("\n\nExecute tf_import_commands_sddcs_nonGF.sh script created under each region directory to synch TF with SDDC\n") - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=[service_dir_sddc]) - - -def export_dns(options=[]): - service_dirs = [] - for opt in options: - if opt == "Export DNS Views/Zones/Records": - export_dns_views_zones_rrsets(inputfile, outdir, service_dir_dns, config, signer, ct, export_regions) - service_dirs = [service_dir_dns] - if opt == "Export DNS Resolvers": - export_dns_resolvers(inputfile, outdir, service_dir_dns, config, signer, ct, export_regions) - service_dirs = [service_dir_dns] - # Update modified path list - update_path_list(regions_path=export_regions, service_dirs=service_dirs) - - -def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file, 'DNS Views ,attached zones and rrsets') - dns_filter = None - if ct.default_dns: - if ct.default_dns.lower() == "false": - dns_filter = "n" - if ct.default_dns.lower() == "true": - dns_filter = "y" - - dns_filter = dns_filter if dns_filter else None - Network.export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, dns_filter=dns_filter, export_compartments=compartments, export_regions=export_regions) - create_terraform_dns(inputfile, outdir, service_dir, prefix, ct) - -def export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file, 'DNS Resolvers') - Network.export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, 
export_compartments=compartments, export_regions=export_regions) - Network.create_terraform_dns_resolvers(inputfile, outdir, service_dir, prefix, ct) - - -def cd3_services(options=[]): - for opt in options: - if opt == 'Fetch Compartments OCIDs to variables file': - fetch_compartments(outdir, outdir_struct, ct) - if opt == 'Fetch Protocols to OCI_Protocols': - fetch_protocols(outdir, outdir_struct, ct) - - -def fetch_protocols(outdir, outdir_struct, ct): - cd3service.fetch_protocols() - -################## Create Functions ########################## -def create_identity(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Add/Modify/Delete Compartments': - Identity.create_terraform_compartments(inputfile, outdir,service_dir_identity, prefix, ct) - service_dirs = [service_dir_identity] - if opt == 'Add/Modify/Delete Groups': - Identity.create_terraform_groups(inputfile, outdir,service_dir_identity, prefix, ct) - service_dirs = [service_dir_identity] - if opt == 'Add/Modify/Delete Policies': - Identity.create_terraform_policies(inputfile, outdir,service_dir_identity, prefix, ct) - service_dirs = [service_dir_identity] - if opt == 'Add/Modify/Delete Users': - Identity.Users.create_terraform_users(inputfile, outdir,service_dir_identity, prefix, ct) - service_dirs = [service_dir_identity] - if opt == 'Add/Modify/Delete Network Sources': - Identity.NetworkSources.create_terraform_networkSources(inputfile, outdir,service_dir_identity, prefix, ct) - service_dirs = [service_dir_identity] - # Update modified path list - update_path_list(regions_path=[ct.home_region], service_dirs=[service_dir_identity]) - - -def create_tags(): - Governance.create_terraform_tags(inputfile, outdir, service_dir_tagging, prefix, ct) - # Update modified path list - update_path_list(regions_path=[ct.home_region], service_dirs=[service_dir_tagging]) - - -def create_network(options=[], sub_options=[]): - service_dirs = [] - for opt in options: - if opt == 'Create Network': - Network.create_all_tf_objects(inputfile, outdir, outdir_struct, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs = [service_dir_network, service_dir_nsg, service_dir_vlan] - if opt == 'Modify Network': - modify_terraform_network(inputfile, outdir, outdir_struct, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == 'Security Rules': - export_modify_security_rules(sub_options, inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == 'Route Rules': - export_modify_route_rules(sub_options, inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == 'DRG Route Rules': - export_modify_drg_route_rules(sub_options, inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == 'Network Security Groups': - export_modify_nsgs(sub_options, inputfile, outdir, service_dir_nsg, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_nsg) if service_dir_nsg not in service_dirs else service_dirs - if opt == 'Add/Modify/Delete VLANs': - create_vlans(inputfile, outdir, outdir_struct, prefix, ct, 
non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_vlan) if service_dir_vlan not in service_dirs else service_dirs - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == 'Customer Connectivity': - create_drg_connectivity(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy=non_gf_tenancy) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - # Update modified path list - regions_path = subscribed_regions.copy() - regions_path.append("global") - service_dirs.append("rpc") - update_path_list(regions_path=regions_path, service_dirs=service_dirs) - - -def modify_terraform_network(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy): - Network.create_all_tf_objects(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy=non_gf_tenancy, modify_network=True, ) - -def export_modify_security_rules(sub_options,inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy): - for opt in sub_options: - if opt == 'Export Security Rules (From OCI into SecRulesinOCI sheet)': - export_security_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy=non_gf_tenancy) - if opt == 'Add/Modify/Delete Security Rules (Reads SecRulesinOCI sheet)': - Network.modify_terraform_secrules(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy) - -def export_security_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy): - compartments = ct.get_compartment_map(var_file, 'OCI Security Rules') - Network.export_seclist(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) - -def export_modify_route_rules(sub_options,inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy): - execute_all = False - for opt in sub_options: - if opt == 'Export Route Rules (From OCI into RouteRulesinOCI sheet)': - export_route_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy=non_gf_tenancy) - if opt == 'Add/Modify/Delete Route Rules (Reads RouteRulesinOCI sheet)': - Network.modify_terraform_routerules(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy) - - -def export_route_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy): - compartments = ct.get_compartment_map(var_file, 'OCI Route Rules') - Network.export_routetable(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) - -def export_modify_drg_route_rules(sub_options, inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy): - execute_all = False - for opt in sub_options: - if opt == 'Export DRG Route Rules (From OCI into DRGRouteRulesinOCI sheet)': - export_drg_route_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy=non_gf_tenancy) - if opt == 'Add/Modify/Delete DRG Route Rules (Reads DRGRouteRulesinOCI sheet)': - Network.modify_terraform_drg_routerules(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy) - - -def export_drg_route_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy): - compartments = ct.get_compartment_map(var_file,'OCI DRG Route Rules') - Network.export_drg_routetable(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) - - -def export_modify_nsgs(sub_options, inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy): - 
execute_all = False - for opt in sub_options: - if opt == 'Export NSGs (From OCI into NSGs sheet)': - export_nsgs(inputfile, outdir, service_dir, prefix, ct) - if opt == 'Add/Modify/Delete NSGs (Reads NSGs sheet)': - Network.create_terraform_nsg(inputfile, outdir, service_dir, prefix, ct) - -def export_nsgs(inputfile, outdir, service_dir, prefix, ct): - compartments = ct.get_compartment_map(var_file,'OCI NSGs') - Network.export_nsg(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) - -def create_vlans(inputfile, outdir, service_dir, prefix,ct, non_gf_tenancy, network_vlan_in_setupoci='vlan'): - Network.create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy=non_gf_tenancy, network_vlan_in_setupoci='vlan',modify_network=True) - Network.create_terraform_route(inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy=non_gf_tenancy, network_vlan_in_setupoci='vlan',modify_network=True) - -def create_drg_connectivity(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy,network_vlan_in_setupoci='vlan'): - execute_all = False - create_rpc( inputfile, outdir, service_dir, service_dir, prefix, auth_mechanism, config_file_path, ct, non_gf_tenancy=non_gf_tenancy) - -def create_rpc(inputfile, outdir, service_dir, service_dir_network, prefix, auth_mechanism, config_file_path, ct, non_gf_tenancy): - Network.create_rpc_resource(inputfile, outdir, service_dir, prefix, auth_mechanism, config_file_path, ct, non_gf_tenancy=non_gf_tenancy) - Network.create_terraform_drg_route(inputfile, outdir, service_dir_network, prefix, non_gf_tenancy=non_gf_tenancy, ct=ct, network_connectivity_in_setupoci='connectivity', modify_network=True) - -def create_compute(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Add/Modify/Delete Dedicated VM Hosts': - create_dedicatedvmhosts(inputfile, outdir, service_dir_dedicated_vm_host,prefix, ct) - service_dirs.append(service_dir_dedicated_vm_host) if service_dir_dedicated_vm_host not in service_dirs else service_dirs - - if opt == 'Add/Modify/Delete Instances/Boot Backup Policy': - create_instances(inputfile, outdir, service_dir_instance,prefix, ct) - service_dirs.append(service_dir_instance) if service_dir_instance not in service_dirs else service_dirs - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=service_dirs) - - -def create_instances(inputfile, outdir, service_dir,prefix,ct): - Compute.create_terraform_instances(inputfile, outdir, service_dir, prefix, ct) - - -def create_dedicatedvmhosts(inputfile, outdir, service_dir, prefix,ct): - Compute.create_terraform_dedicatedhosts(inputfile, outdir, service_dir,prefix, ct) - - -def create_storage(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Add/Modify/Delete Block Volumes/Block Backup Policy': - Storage.create_terraform_block_volumes(inputfile, outdir, service_dir_block_volume, prefix, ct) - service_dirs.append(service_dir_block_volume) if service_dir_block_volume not in service_dirs else service_dirs - if opt == 'Add/Modify/Delete File Systems': - Storage.create_terraform_fss(inputfile, outdir, service_dir_fss, prefix, ct) - service_dirs.append(service_dir_fss) if service_dir_fss not in service_dirs else service_dirs - if opt == 'Add/Modify/Delete Object Storage Buckets': - Storage.create_terraform_oss( inputfile, outdir, service_dir_object_storage, prefix, ct) - service_dirs.append(service_dir_object_storage) if 
service_dir_object_storage not in service_dirs else service_dirs - #Option('Enable Object Storage Buckets Write Logs', create_cis_oss_logs, '') - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=service_dirs) - - -def create_loadbalancer(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Add/Modify/Delete Load Balancers': - create_lb(inputfile, outdir,service_dir_loadbalancer, prefix, ct) - service_dirs.append(service_dir_loadbalancer) if service_dir_loadbalancer not in service_dirs else service_dirs - if opt == 'Add/Modify/Delete Network Load Balancers': - create_nlb(inputfile, outdir,service_dir_networkloadbalancer, prefix, ct) - service_dirs.append(service_dir_networkloadbalancer) if service_dir_networkloadbalancer not in service_dirs else service_dirs - #Option('Enable LBaaS Logs', enable_lb_logs, 'LBaaS Logs') - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=service_dirs) - - -def create_lb(inputfile, outdir,service_dir, prefix, ct): - Network.create_terraform_lbr_hostname_certs(inputfile, outdir, service_dir, prefix, ct) - Network.create_backendset_backendservers(inputfile, outdir, service_dir, prefix, ct) - Network.create_listener(inputfile, outdir, service_dir, prefix, ct) - Network.create_path_route_set(inputfile, outdir, service_dir, prefix, ct) - Network.create_ruleset(inputfile, outdir, service_dir, prefix, ct) - - -def create_nlb(inputfile, outdir,service_dir, prefix, ct): - Network.create_terraform_nlb_listener(inputfile, outdir, service_dir, prefix, ct) - Network.create_nlb_backendset_backendservers(inputfile, outdir, service_dir, prefix, ct) - - -def create_databases(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Add/Modify/Delete Virtual Machine or Bare Metal DB Systems': - Database.create_terraform_dbsystems_vm_bm(inputfile, outdir, service_dir_dbsystem_vm_bm, prefix, ct) - service_dirs.append(service_dir_dbsystem_vm_bm) if service_dir_dbsystem_vm_bm not in service_dirs else service_dirs - if opt == 'Add/Modify/Delete EXA Infra and EXA VM Clusters': - create_exa_infra_vmclusters(inputfile, outdir,service_dir_database_exacs, prefix,ct) - service_dirs.append(service_dir_database_exacs) if service_dir_database_exacs not in service_dirs else service_dirs - if opt == 'Add/Modify/Delete ADBs': - Database.create_terraform_adb(inputfile, outdir, service_dir_adb, prefix, ct) - service_dirs.append(service_dir_adb) if service_dir_adb not in service_dirs else service_dirs - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=service_dirs) - -def create_exa_infra_vmclusters(inputfile, outdir,service_dir, prefix,ct): - Database.create_terraform_exa_infra(inputfile, outdir, service_dir, prefix, ct) - Database.create_terraform_exa_vmclusters(inputfile, outdir, service_dir, prefix, ct) - -def create_management_services(options=[]): - service_dirs = [] - for opt in options: - if opt == "Add/Modify/Delete Notifications": - ManagementServices.create_terraform_notifications(inputfile, outdir, service_dir_managementservices, prefix, ct) - service_dirs = [service_dir_managementservices] - if opt == "Add/Modify/Delete Events": - ManagementServices.create_terraform_events(inputfile, outdir, service_dir_managementservices, prefix, ct) - service_dirs = [service_dir_managementservices] - if opt == "Add/Modify/Delete Alarms": - ManagementServices.create_terraform_alarms(inputfile, outdir, service_dir_managementservices, prefix, ct) - 
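Throughout this script, menu entries are lightweight Option records pairing a display name with a callback and a status line, and execute_options simply invokes the chosen callbacks in order. A stripped-down sketch of that dispatch, with the Option fields inferred from how option.name, option.callback, and option.text are used here, and with placeholder callbacks and status strings:

from collections import namedtuple

# Field names inferred from usage in show_options/execute_options
Option = namedtuple("Option", ["name", "callback", "text"])

def execute_options(options, *args, **kwargs):
    for option in options:
        print("Running: " + option.text)   # the toolkit wraps this in a section(...) context manager
        option.callback(*args, **kwargs)

options = [
    Option("Add/Modify/Delete Alarms", lambda *a, **k: print("alarms..."), "Alarms"),
    Option("Add/Modify/Delete Events", lambda *a, **k: print("events..."), "Events"),
]
execute_options(options, "outdir", "prefix", "config_file")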
service_dirs = [service_dir_managementservices] - if opt == "Add/Modify/Delete ServiceConnectors": - ManagementServices.create_service_connectors(inputfile, outdir, service_dir_managementservices, prefix, ct) - service_dirs = [service_dir_managementservices] - - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=[service_dir_managementservices]) - - -def create_developer_services(options=[]): - for opt in options: - if opt == "Upload current terraform files/state to Resource Manager": - create_rm_stack(inputfile, outdir, prefix, auth_mechanism, config_file_path,ct) - if opt == "Add/Modify/Delete OKE Cluster and Nodepools": - create_oke(inputfile, outdir, prefix, auth_mechanism, config_file_path,ct) - - -def create_rm_stack(inputfile, outdir, prefix, auth_mechanism, config_file, ct): - regions = get_region_list(rm = True) - DeveloperServices.create_resource_manager(outdir,var_file, outdir_struct, prefix, auth_mechanism, config_file, ct, regions) - -def create_oke(inputfile, outdir, prefix, auth_mechanism, config_file, ct): - DeveloperServices.create_terraform_oke(inputfile, outdir, service_dir_oke, prefix, ct) - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=[service_dir_oke]) - - -def create_sddc(): - SDDC.create_terraform_sddc(inputfile, outdir, service_dir_sddc, prefix, ct) - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=[service_dir_sddc]) - - -def create_dns(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Add/Modify/Delete DNS Views/Zones/Records': - create_terraform_dns(inputfile, outdir, service_dir_dns, prefix, ct) - service_dirs = [service_dir_dns] - if opt == 'Add/Modify/Delete DNS Resolvers': - Network.create_terraform_dns_resolvers(inputfile, outdir, service_dir_dns, prefix, ct) - service_dirs = [service_dir_dns] - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=service_dirs) - - -def create_terraform_dns(inputfile, outdir, service_dir, prefix, ct): - Network.create_terraform_dns_views(inputfile, outdir, service_dir, prefix, ct) - Network.create_terraform_dns_zones(inputfile, outdir, service_dir, prefix, ct) - Network.create_terraform_dns_rrsets(inputfile, outdir, service_dir, prefix, ct) - -def create_logging(options=[]): - service_dirs = [] - for opt in options: - if opt == 'Enable VCN Flow Logs': - ManagementServices.enable_cis_vcnflow_logging(inputfile, outdir, service_dir_network, prefix, ct) - service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs - if opt == 'Enable LBaaS Logs': - ManagementServices.enable_load_balancer_logging(inputfile, outdir, service_dir_loadbalancer, prefix, ct) - service_dirs.append(service_dir_loadbalancer) if service_dir_loadbalancer not in service_dirs else service_dirs - if opt == 'Enable Object Storage Buckets Write Logs': - ManagementServices.enable_cis_oss_logging(inputfile, outdir, service_dir_object_storage, prefix, ct) - service_dirs.append(service_dir_object_storage) if service_dir_object_storage not in service_dirs else service_dirs - - # Update modified path list - update_path_list(regions_path=subscribed_regions, service_dirs=service_dirs) - - -def create_cis_features(options=[], sub_options=[]): - service_dirs = [] - for opt in options: - if opt == 'CIS Compliance Checking Script': - initiate_cis_scan(sub_options,outdir, prefix, config_file_path) - if opt == "Create Key/Vault": - 
Security.create_cis_keyvault(outdir, service_dir_kms, service_dir_identity, prefix, ct, ct.vault_region, - ct.vault_comp) - service_dir = ct.vault_region+"/"+service_dir_identity - service_dirs.append(service_dir) if service_dir not in service_dirs else service_dirs - if opt == "Create Default Budget": - Governance.create_cis_budget(outdir, service_dir_budget, prefix, ct, ct.budget_amount, ct.budget_threshold) - service_dir = ct.home_region + "/" + service_dir_budget - service_dirs.append(service_dir) if service_dir not in service_dirs else service_dirs - if opt == "Enable Cloud Guard": - Security.enable_cis_cloudguard(outdir, service_dir_cloud_guard, prefix, ct, ct.cg_region) - service_dir = ct.cg_region + "/" + service_dir_cloud_guard - service_dirs.append(service_dir) if service_dir not in service_dirs else service_dirs - - # Update modified path list - update_path_list(regions_path=[""], service_dirs=service_dirs) - - -def initiate_cis_scan(sub_options,outdir, prefix, config_file): - for opt in sub_options: - if opt == "CD3 Image already contains the latest CIS compliance checking script available at the time of cd3 image release. Download latest only if new version of the script is available": - start_cis_download(outdir, prefix, config_file) - if opt == "Execute compliance checking script": - start_cis_scan(outdir, prefix, config_file) - -def start_cis_download(outdir, prefix, config_file): - print("Downloading the script file as 'cis_reports.py' at location "+os.getcwd()) - resp = requests.get("https://raw.githubusercontent.com/oracle-quickstart/oci-cis-landingzone-quickstart/main/scripts/cis_reports.py") - resp_contents = resp.text - with open("cis_reports.py", "w", encoding="utf-8") as fd: - fd.write(resp_contents) - print("Download complete!!") - -def start_cis_scan(outdir, prefix, config_file): - cmd = "python cis_reports.py" - #user_input = input("Enter command to execute the script. Press Enter to execute {} : ".format(cmd)) - #if user_input!='': - # cmd = "{}".format(user_input) - split = str.split(cmd) - - dirname = prefix + "_cis_report" - resource = "cis_report" - out_rep = outdir + '/'+ dirname - #config = "--config "+ config - commonTools.backup_file(outdir, resource, dirname) - - if not os.path.exists(out_rep): - os.makedirs(out_rep) - else: - commonTools.backup_file(outdir, resource, out_rep) - - out = ["-c", config_file, '--report-directory', out_rep] - cmd = cmd +" "+ out[0] + " "+out[1] + " "+ out[2] + " " +out[3] - split.extend(out) - print("Executing: "+cmd) - print("Scan started!") - execute(split, config_file) - -def execute(command,config_file): - export_cmd_windows = "set OCI_CONFIG_HOME="+config_file - export_cmd_linux = "export OCI_CONFIG_HOME=" + config_file - export_cmd = "" - if "linux" in sys.platform: - export_cmd = export_cmd_linux - elif "win" in sys.platform: - export_cmd = export_cmd_windows - - if export_cmd == "": - print("Failed to get OS details. Exiting!!") - exit(1) - - split_export_cmd = str.split(export_cmd) - #subprocess.Popen(split_export_cmd, stdout=subprocess.PIPE,bufsize=1) - popen = subprocess.Popen(command, stdout=subprocess.PIPE,bufsize=1) - lines_iterator = iter(popen.stdout.readline, b"") - while popen.poll() is None: - for line in lines_iterator: - nline = line.rstrip() - print(nline.decode("latin"), end="\r\n", flush=True)# yield line - - -#Execution starts here -parser = argparse.ArgumentParser(description='Sets Up OCI via TF') -parser.add_argument('propsfile', help="Full Path of properties file containing input variables. 
eg setUpOCI.properties") -parser.add_argument('--main_options', default="") -parser.add_argument('--sub_options', default="") -parser.add_argument('--sub_child_options', default="") -parser.add_argument('--add_filter', default=None) -args = parser.parse_args() -setUpOCI_props = configparser.RawConfigParser() -setUpOCI_props.read(args.propsfile) -main_options = args.main_options.split(",") -sub_options = args.sub_options.split(",") -sub_child_options = args.sub_child_options.split(",") - -#Read Config file Variables -try: - workflow_type = setUpOCI_props.get('Default', 'workflow_type').strip().lower() - - if (workflow_type == 'export_resources'): - non_gf_tenancy = True - else: - non_gf_tenancy = False - - inputfile = setUpOCI_props.get('Default','cd3file').strip() - outdir = setUpOCI_props.get('Default', 'outdir').strip() - prefix = setUpOCI_props.get('Default', 'prefix').strip() - auth_mechanism = setUpOCI_props.get('Default', 'auth_mechanism').strip().lower() - config_file_path = setUpOCI_props.get('Default', 'config_file').strip() or DEFAULT_LOCATION - - if not outdir: - exit_menu('input outdir location cannot be left blank. Exiting... ') - elif not prefix: - exit_menu('input prefix value cannot be left blank. Exiting... ') - elif not inputfile: - exit_menu('input cd3file location cannot be left blank. Exiting... ') - elif '.xls' not in inputfile: - exit_menu('valid formats for input cd3file are either .xls or .xlsx') -except Exception as e: - exit_menu(str(e) + ". Check input properties file and try again. Exiting... ") - -try: - outdir_structure = setUpOCI_props.get('Default', 'outdir_structure_file').strip() -except Exception as e: - outdir_structure = '' - -# Pre-work -if not os.path.exists(outdir): - os.makedirs(outdir) - -if (outdir_structure == '' or outdir_structure == "\n"): - outdir_struct = {} -else: - if os.path.isfile(outdir_structure): - outdir_config = configparser.RawConfigParser() - outdir_config.read(outdir_structure) - outdir_struct = dict(outdir_config.items("Default")) - else: - print("Invalid outdir_structure_file. Please provide correct file path. Exiting... 
") - exit(1) - -## Authenticate Params -ct=None -ct = commonTools() -config,signer = ct.authenticate(auth_mechanism, config_file_path) - -# Set Export filters -export_filters = args.add_filter.split("@") if args.add_filter else [] -ct.get_export_filters(export_filters) - -## Fetch OCI_regions -cd3service = cd3Services() -cd3service.fetch_regions(config,signer) - -## Check if fetch compartments script needs to be run -run_fetch_script = 0 - -## Fetch Subscribed Regions -subscribed_regions = ct.get_subscribedregions(config,signer) -home_region = ct.home_region - -# Set service directories as per outdir_structure file -# If single outdir, get service names from /cd3user/oci_tools/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties -if len(outdir_struct.items())==0: - single_outdir_config = configparser.RawConfigParser() - single_outdir_config.read("/cd3user/oci_tools/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties") - for item,val in single_outdir_config.items("Default"): - varname = "service_dir_" + str(item.replace("-", "_")).strip() - exec(varname + "= \"\"") -# If multiple outdir, get service names from -else: - for key,value in outdir_struct.items(): - varname = "service_dir_"+str(key.replace("-","_")).strip() - exec(varname + "= value") - -var_file = (f'{outdir}/{home_region}/{service_dir_identity}/variables_{home_region}.tf').replace('//','/') - -try: - # read variables file - with open(var_file, 'r') as f: - var_data = f.read() - f.close() -except FileNotFoundError as e: - exit_menu(f'\nVariables file not found in home region - {home_region}.......Exiting!!!\n') - -## Check for the fetch compartment status -fetchcompinfo_data = "run_fetch_script=0" -try: - # read fetchcompinfo.safe - fetch_comp_file = f'{outdir}/fetchcompinfo.safe' - with open(fetch_comp_file, 'r') as f: - fetchcompinfo_data = f.read() - f.close() -except FileNotFoundError as e: - fetchcompinfo_data = "run_fetch_script=1" -if "# compartment ocids" in var_data or "run_fetch_script=1" in fetchcompinfo_data: - run_fetch_script = 1 - -if (run_fetch_script == 1): - print("Script to Fetch Compartments OCIDs to variables file has not been executed. Running it now.") - fetch_compartments(outdir,outdir_struct, ct) -else: - print("Make sure to execute the script for 'Fetch Compartments OCIDs to variables file' under 'CD3 Services' menu option at-least once before you continue!") -global updated_paths -global import_scripts -updated_paths = [] -import_scripts = [] -exec_start_time = datetime.datetime.now() - - -## Menu Options -if non_gf_tenancy: - print("\nworkflow_type set to export_resources. 
Export existing OCI objects and Synch with TF state") - print("We recommend to not have any existing tfvars/tfstate files for export out directory") - export_regions = get_region_list(rm=False) - for option in main_options: - if option == 'Export Identity': - export_identityOptions(options=sub_options) - if option == 'Export Tags': - export_tags(options=sub_options) - if option == 'Export Network': - export_network(options=sub_options) - if option == 'Export DNS Management': - export_dns(options=sub_options) - if option == 'Export Compute': - export_compute(options=sub_options) - if option == 'Export Storage': - export_storage(options=sub_options) - if option == 'Export Databases': - export_databases(options=sub_options) - if option == 'Export Load Balancers': - export_loadbalancer(options=sub_options) - if option == 'Export Management Services': - export_management_services(options=sub_options) - if option == 'Export Developer Services': - export_developer_services(options=sub_options) - if option == 'Export Software-Defined Data Centers - OCVS': - export_sddc() - if option == 'CD3 Services': - cd3_services(options=sub_options) -else: - export_regions = ct.all_regions - for option in main_options: - if option == 'Validate CD3': - validate_cd3(options=sub_options) - if option == 'Identity': - create_identity(options=sub_options) - if option == 'Tags': - create_tags() - if option == 'Network': - create_network(options=sub_options, sub_options=sub_child_options) - if option == 'DNS Management': - create_dns(options=sub_options) - if option == 'Compute': - create_compute(options=sub_options) - if option == 'Storage': - create_storage(options=sub_options) - if option == 'Database': - create_databases(options=sub_options) - if option == 'Load Balancers': - create_loadbalancer(options=sub_options) - if option == 'Management Services': - create_management_services(options=sub_options) - if option == 'Developer Services': - create_developer_services(options=sub_options) - if option == 'Logging Services': - create_logging(options=sub_options) - if option == 'Software-Defined Data Centers - OCVS': - create_sddc() - if option == 'CIS Compliance Features': - create_cis_features(options=sub_options,sub_options=sub_child_options) - if option == 'CD3 Services': - cd3_services(options=sub_options) - -# write updated paths to a file -updated_paths_file = f'{outdir}/updated_paths.safe' -with open(updated_paths_file, 'w+') as f: - for item in updated_paths: - f.write(str(item).replace('//','/')+"\n") -f.close() -import_scripts_file = f'{outdir}/import_scripts.safe' -with open(import_scripts_file, 'w+') as f: - for item in import_scripts: - f.write(str(item).replace('//','/')+"\n") -f.close() diff --git a/cd3_automation_toolkit/shell_script.sh b/cd3_automation_toolkit/shell_script.sh index f1dd087f7..cb7fa43c6 100644 --- a/cd3_automation_toolkit/shell_script.sh +++ b/cd3_automation_toolkit/shell_script.sh @@ -1,43 +1,51 @@ #!/bin/bash -#Create Required Directories -mkdir ~/.oci - -sudo yum-config-manager --enable ol7_developer_EPEL -sudo yum-config-manager --enable ol7_developer - -sudo yum -y install scl-utils -sudo yum -y install centos-release-scl - -sudo yum -y install rh-python38 - - -echo "source scl_source enable rh-python38" >> /cd3user/.bashrc -source /cd3user/.bashrc -python -m pip install --user --upgrade pip -#non needed -#python -m pip install --user oci==2.110.1 -python -m pip install --user oci-cli==3.37.0 -python -m pip install --user pycryptodomex==3.10.1 -python -m pip install --user 
regex==2022.10.31 -python -m pip install --user pandas==1.1.5 -python -m pip install --user openpyxl==3.0.7 -python -m pip install --user xlrd==1.2.0 -python -m pip install --user xlsxwriter==1.3.7 -python -m pip install --user wget==3.2 -python -m pip install --user requests==2.28.2 -python -m pip install --user netaddr==0.8.0 -python -m pip install --user cfgparse==1.3 -python -m pip install --user ipaddress==1.0.23 -python -m pip install --user Jinja2==3.1.2 -python -m pip install --user simplejson==3.18.3 -python -m pip install --user GitPython==3.1.40 -python -m pip install --user PyYAML==6.0.1 -echo "export PYTHONPATH=${PYTHONPATH}:/root/.local/lib/python3.8/site-packages/:/cd3user/.local/lib/python3.8/site-packages/:/opt/rh/rh-python38/root/usr/lib/python3.8/site-packages/" >> /cd3user/.bashrc -echo "PATH=$PATH:/cd3user/.local/bin" >> /cd3user/.bashrc -source /cd3user/.bashrc - - -yes | sudo rpm -iUvh https://yum.oracle.com/repo/OracleLinux/OL7/developer/x86_64/getPackage/terraform-1.3.0-1.el7.x86_64.rpm +# Create Required Directories +mkdir -p ~/.oci + +# Enable EPEL and Developer repositories +sudo dnf install -y oracle-epel-release-el9 +sudo dnf install -y oraclelinux-release-el9 +sudo dnf install -y procps-ng + +# Upgrade pip +sudo dnf install python-pip -y +#sudo ln -s /usr/bin/pip3 /usr/bin/pip + +# Install required Python packages +pip install --user oci-cli==3.39.0 +pip install --user pycryptodomex==3.10.1 +pip install --user regex==2022.10.31 +pip install --user pandas==1.1.5 +pip install --user openpyxl==3.0.7 +pip install --user xlrd==1.2.0 +pip install --user xlsxwriter==1.3.7 +pip install --user wget==3.2 +pip install --user requests==2.28.2 +pip install --user netaddr==0.8.0 +pip install --user cfgparse==1.3 +pip install --user ipaddress==1.0.23 +pip install --user Jinja2==3.1.2 +pip install --user simplejson==3.18.3 +pip install --user GitPython==3.1.40 +pip install --user PyYAML==6.0.1 + +# Add Python3 site-packages to PYTHONPATH +echo "export PYTHONPATH=\${PYTHONPATH}:/root/.local/lib/python3.9/site-packages/:/cd3user/.local/lib/python3.9/site-packages/" >> /cd3user/.bashrc + +# Add Python binaries to PATH +echo "PATH=\$PATH:/cd3user/.local/bin" >> /cd3user/.bashrc + + +# Download and install Terraform +#sudo dnf install -y https://yum.oracle.com/repo/OracleLinux/OL9/developer/x86_64/getPackage/terraform-1.3.6-1.el9.x86_64.rpm +sudo dnf install -y wget +sudo dnf install -y unzip +sudo wget https://releases.hashicorp.com/terraform/1.3.6/terraform_1.3.6_linux_amd64.zip +unzip terraform_1.3.6_linux_amd64.zip +sudo mv terraform /usr/local/sbin/ +sudo rm terraform_1.3.6_linux_amd64.zip + +# Download and install OPA curl -L -o opa https://openpolicyagent.org/downloads/v0.55.0/opa_linux_amd64_static -sudo chmod +x opa && sudo mv opa /usr/local/sbin/ \ No newline at end of file +sudo chmod +x opa && sudo mv opa /usr/local/sbin/ diff --git a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py index 6d62cca7c..bf3d08fbd 100644 --- a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py +++ b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py @@ -26,7 +26,6 @@ from commonTools import * from copy import deepcopy from subprocess import DEVNULL - global topic_name global project_name global repo_name @@ -149,24 +148,24 @@ def create_devops_resources(config,signer): def update_devops_config(prefix,git_config_file, repo_ssh_url,files_in_repo,dir_values,devops_user,devops_user_key,devops_dir,ct): # 
create git config file file = open(git_config_file, "w") - file.write("Host devops.scmservice.*.oci.oraclecloud.com\n " - "StrictHostKeyChecking no\n " - "User "+str(devops_user)+"\n " - "IdentityFile "+str(devops_user_key)+"\n") + file.write("Host devops.scmservice.*.oci"+cloud_domain+"\n " + "StrictHostKeyChecking no\n " + "User " + str(devops_user) + "\n " + "IdentityFile " + str(devops_user_key) + "\n") file.close() # copy to cd3user home dir - if not os.path.exists("/cd3user/.ssh"): - os.makedirs("/cd3user/.ssh") - shutil.copyfile(git_config_file,'/cd3user/.ssh/config') + user_ssh_dir = os.path.expanduser("~") + "/.ssh" + if not os.path.exists(user_ssh_dir): + os.makedirs(user_ssh_dir) + shutil.copyfile(git_config_file, user_ssh_dir + '/config') # change permissions of private key file and config file for GIT os.chmod(devops_user_key, 0o600) - os.chmod('/cd3user/.ssh/config', 0o600) + os.chmod(user_ssh_dir + '/config', 0o600) os.chmod(git_config_file, 0o600) - ''' # create symlink for Git Config file for SSH operations. src = git_config_file @@ -206,8 +205,10 @@ def update_devops_config(prefix,git_config_file, repo_ssh_url,files_in_repo,dir_ cfg = yaml.dump(cfg, stream=yaml_file, default_flow_style=False, sort_keys=False) # Clean repo config if exists and initiate git repo subprocess.run(['git', 'init'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git', 'config', '--global', 'init.defaultBranch', "main"], cwd=devops_dir) + subprocess.run(['git', 'config', '--global', 'safe.directory', devops_dir], cwd=devops_dir) f = open(devops_dir + ".gitignore", "w") - git_ignore_file_data = ".DS_Store\n*tfstate*\n*terraform*\ntfplan.out\ntfplan.json\n*backup*\ntf_import_commands*\n*cis_report*\n*.safe\n*stacks.zip\n*cd3Validator*" + git_ignore_file_data = ".DS_Store\n*tfstate*\n*terraform*\ntfplan.out\ntfplan.json\n*backup*\ntf_import_commands*\n*cis_report*\n*showoci_report*\n*.safe\n*stacks.zip\n*cd3Validator*" f.write(git_ignore_file_data) f.close() # Cleanup existing "origin" remote and create required one @@ -303,11 +304,13 @@ def create_bucket(config, signer): user_dir = "/cd3user" safe_file = user_dir + "/tenancies/createTenancyConfig.safe" auto_keys_dir = user_dir + "/tenancies/keys" -toolkit_dir = user_dir +"/oci_tools/cd3_automation_toolkit" +toolkit_dir = os.path.dirname(os.path.abspath(__file__))+"/.." 
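# A minimal standalone sketch (illustrative only, names are placeholders) of the path handling in this
# createTenancyConfig.py hunk: toolkit_dir is derived from the script's own location rather than the fixed
# /cd3user/oci_tools path, and the JENKINS_INSTALL environment variable is treated as optional further down
# in the hunk. os.environ.get() shown here is one common way to make that lookup optional.
import os

toolkit_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
jenkins_dir = os.environ.get("JENKINS_INSTALL")  # None when the variable is not set (e.g. no Jenkins in the image)
if jenkins_dir is None:
    print("JENKINS_INSTALL not set; skipping Jenkins-specific paths")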
+#toolkit_dir = user_dir +"/oci_tools/cd3_automation_toolkit" modules_dir = toolkit_dir + "/user-scripts/terraform" variables_example_file = modules_dir + "/variables_example.tf" setupoci_props_toolkit_file_path = toolkit_dir + "/setUpOCI.properties" -jenkins_dir = os.environ['JENKINS_INSTALL'] +if hasattr(os.environ,'JENKINS_INSTALL'): + jenkins_dir = os.environ['JENKINS_INSTALL'] prefix = config.get('Default', 'customer_name').strip() if prefix == "" or prefix == "\n": @@ -339,6 +342,7 @@ def create_bucket(config, signer): config_file_path = config_files + "/" + prefix + "_oci_config" terraform_files = customer_tenancy_dir + "/terraform_files/" +outdir_safe=terraform_files+"/.safe" setupoci_props_file_path = customer_tenancy_dir + "/" + prefix + "_setUpOCI.properties" # Read Config file Variables @@ -346,11 +350,17 @@ def create_bucket(config, signer): user='' _key_path='' fingerprint='' + cloud_domain='' tenancy = config.get('Default', 'tenancy_ocid').strip() if tenancy == "" or tenancy == "\n": print("Tenancy ID cannot be left empty...Exiting !!") exit(1) + if ("ocid1.tenancy.oc1" in tenancy): + cloud_domain=".oraclecloud.com" + else: + cloud_domain=".oraclegovcloud.com" + auth_mechanism = config.get('Default', 'auth_mechanism').strip().lower() if auth_mechanism == "" or auth_mechanism == "\n" or (auth_mechanism!='api_key' and auth_mechanism!='session_token' and auth_mechanism!='instance_principal'): @@ -441,15 +451,18 @@ def create_bucket(config, signer): os.makedirs(customer_tenancy_dir) if not os.path.exists(config_files): os.makedirs(config_files) +if not os.path.exists(outdir_safe): + os.makedirs(outdir_safe) + dir_values = [] # Copy input properties file to customer_tenancy_dir -shutil.copy(args.propsfile,config_files+"/"+prefix+"_"+args.propsfile) +shutil.copy(args.propsfile,config_files+"/"+prefix+"_"+os.path.basename(args.propsfile)) # 1. Copy outdir_structure_file # Copy default outdir_structure_file -shutil.copy('/cd3user/oci_tools/cd3_automation_toolkit/user-scripts/outdir_structure_file.properties', '/cd3user/oci_tools/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties') +shutil.copy(toolkit_dir+'/user-scripts/outdir_structure_file.properties', toolkit_dir+'/user-scripts/.outdir_structure_file.properties') _outdir_structure_file = '' if (outdir_structure_file != '' and outdir_structure_file != "\n"): @@ -575,7 +588,7 @@ def create_bucket(config, signer): cred_name = prefix+"-automation-toolkit-csk" # Get user ocid for DevOps User Name - if "ocid1.user.oc1" not in remote_state_user: + if "ocid1.user.oc" not in remote_state_user: if '@' in remote_state_user: remote_state_user = remote_state_user.rsplit("@",1)[0] @@ -667,7 +680,7 @@ def create_bucket(config, signer): elif line.__contains__("region = "): global_backend_file_data += " region = \"" + bucket_region + "\"\n" elif line.__contains__("endpoint = "): - global_backend_file_data += " endpoint = \"https://" + namespace + ".compat.objectstorage." + bucket_region + ".oraclecloud.com\"\n" + global_backend_file_data += " endpoint = \"https://" + namespace + ".compat.objectstorage." 
+ bucket_region + cloud_domain + "\"\n" elif line.__contains__("shared_credentials_file = "): global_backend_file_data += " shared_credentials_file = \"" + s3_credential_file_path + "\"\n" else: @@ -714,8 +727,10 @@ def create_bucket(config, signer): print("Creating Tenancy specific region directories, terraform provider , variables files.................") +regions_file_data = "" for region in ct.all_regions: + regions_file_data = regions_file_data+region.title()+"\n" # Rerunning createTenancy for any new region subscription. Process only new region directories else continue if os.path.exists(terraform_files+region): continue @@ -824,9 +839,26 @@ def create_bucket(config, signer): rewrite_backend.write(new_backend_data) rewrite_backend.close() - # Manage multiple outdir + # Manage single and multiple outdir if (outdir_structure_file == '' or outdir_structure_file == "\n"): - pass + #remove depends_on for single outdir + region_dir = terraform_files + "/" + region + "/" + single_outdir_config = configparser.RawConfigParser() + single_outdir_config.read("/cd3user/oci_tools/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties") + keys = [] + for key, val in single_outdir_config.items("Default"): + keys.append(key) + for file in os.listdir(region_dir): + name=file.removesuffix(".tf") + if name in keys: + file=region_dir+"/"+file + with open(file, 'r+') as tf_file: + module_data = tf_file.read().rstrip() + module_data = module_data.replace("# depends_on", "depends_on") + tf_file.close() + f = open(file, "w+") + f.write(module_data) + f.close() else: region_dir = terraform_files + "/" + region + "/" for service, service_dir in outdir_config.items("Default"): @@ -887,10 +919,26 @@ def create_bucket(config, signer): # 8. Remove terraform example variable file from outdir os.remove(terraform_files + "/" + region + "/variables_example.tf") - # 9. Update DevOps files and configurations if use_devops == 'yes': print("\nCreating Tenancy specific DevOps Items - Topic, Project and Repository.................") + # create subscribed regions file + f = open(customer_tenancy_dir + "/.config_files/regions_file", "w+") + f.write(regions_file_data[:-1]) + f.close() + # create all compartments_file + print("Fetching existing Compartments from Tenancy...") + ct.get_network_compartment_ids(config['tenancy'], "root", config, signer) + compartments_file_data = "" + comp_done = [] + for k, v in ct.ntk_compartment_ids.items(): + if v not in comp_done: + compartments_file_data += k + "\n" + comp_done.append(v) + + f = open(customer_tenancy_dir + "/.config_files/compartments_file", "w+") + f.write(compartments_file_data[:-1]) + f.close() if devops_repo == '' or devops_repo == "\n": topic_name = prefix + "-automation-toolkit-topic" @@ -909,7 +957,7 @@ def create_bucket(config, signer): git_config_file = config_files + "/" + prefix + "_git_config" #Get Username from $user_ocid if $oci_devops_git_user is left empty - if "ocid1.user.oc1" in devops_user: + if "ocid1.user.oc" in devops_user: identity_client = oci.identity.IdentityClient(config=new_config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) diff --git a/cd3_automation_toolkit/user-scripts/tenancyconfig.properties b/cd3_automation_toolkit/user-scripts/tenancyconfig.properties index f41aedecb..d5283792f 100644 --- a/cd3_automation_toolkit/user-scripts/tenancyconfig.properties +++ b/cd3_automation_toolkit/user-scripts/tenancyconfig.properties @@ -74,15 +74,14 @@ use_oci_devops_git=no # in ${region}. 
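# A minimal sketch of the realm-aware endpoint construction used in the createTenancyConfig.py hunk above:
# the S3-compatible Object Storage domain now follows the tenancy's realm instead of being hard-coded to
# oraclecloud.com. Values below are illustrative placeholders.
namespace = "examplenamespace"
bucket_region = "us-ashburn-1"
tenancy_ocid = "ocid1.tenancy.oc1..exampleuniqueid"
cloud_domain = ".oraclecloud.com" if "ocid1.tenancy.oc1" in tenancy_ocid else ".oraclegovcloud.com"
endpoint = "https://" + namespace + ".compat.objectstorage." + bucket_region + cloud_domain
print(endpoint)  # https://examplenamespace.compat.objectstorage.us-ashburn-1.oraclecloud.com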
oci_devops_git_repo_name= -# User Details to perform GIT operations in OCI Devops GIT Repo; Mandatory when using $(auth_mechanism) as instance_principal +# User Details to perform GIT operations in OCI Devops GIT Repo and Remote Terraform State Management; Mandatory when using $(auth_mechanism) as instance_principal # or session_token -# Format: /@ eg oracleidentitycloudservice/devopsuser@oracle.com@ocitenant +# Customer Secret Key will be created for this user for S3 credentials of the bucket. # When left empty, it will be fetched from $(user_ocid) for $(auth_mechanism) as api_key. -# Customer Secret Key will also be configured for this user for S3 credentials of the bucket when $(auth_mechanism) is -# instance_principal or session_token +# Format: /@ eg oracleidentitycloudservice/devopsuser@oracle.com@ocitenant oci_devops_git_user= + # When left empty, same key file from $(key_path) used for $(auth_mechanism) as api_key will be copied to # /cd3user/tenancies// and used for GIT Operations. -# Make sure the api key file permissions are rw(600) for cd3user oci_devops_git_key= diff --git a/cd3_automation_toolkit/user-scripts/terraform/adb.tf b/cd3_automation_toolkit/user-scripts/terraform/adb.tf index 0b02ae7d8..ead0d5a95 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/adb.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/adb.tf @@ -8,7 +8,7 @@ data "oci_core_subnets" "oci_subnets_adb" { # depends_on = [module.subnets] # Uncomment to create Network and FSS together #for_each = var.adb != null ? var.adb : {} for_each = { for k, v in var.adb : k => v if v.vcn_name != null } - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_id vcn_id = data.oci_core_vcns.oci_vcns_adb[each.key].virtual_networks.*.id[0] } @@ -17,17 +17,17 @@ data "oci_core_vcns" "oci_vcns_adb" { # depends_on = [module.vcns] # Uncomment to create Network and FSS together #for_each = var.adb != null ? var.adb : {} for_each = { for k, v in var.adb : k => v if v.vcn_name != null } - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } module "adb" { source = "./modules/database/adb" for_each = var.adb != null ? var.adb : {} - # depends_on = [module.vcns, module.subnets] + # depends_on = [module.nsgs] admin_password = each.value.admin_password character_set = each.value.character_set - compartment_id = each.value.compartment_id != null ? 
(length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null cpu_core_count = each.value.cpu_core_count database_edition = each.value.database_edition data_storage_size_in_tbs = each.value.data_storage_size_in_tbs @@ -39,10 +39,10 @@ module "adb" { license_model = each.value.license_model ncharacter_set = each.value.ncharacter_set customer_contacts = each.value.customer_contacts - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null network_security_group_ids = each.value.nsg_ids freeform_tags = each.value.freeform_tags - subnet_id = each.value.subnet_id != null ? (length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_adb[each.key].subnets.*.id[0]) : null + subnet_id = each.value.subnet_id != null ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_adb[each.key].subnets.*.id[0]) : null vcn_name = each.value.vcn_name != null ? each.value.vcn_name : null whitelisted_ips = each.value.whitelisted_ips diff --git a/cd3_automation_toolkit/user-scripts/terraform/block-volume.tf b/cd3_automation_toolkit/user-scripts/terraform/block-volume.tf index 687da5401..aae80e87f 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/block-volume.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/block-volume.tf @@ -8,7 +8,7 @@ data "oci_core_instances" "instance" { depends_on = [module.instances] for_each = var.blockvolumes != null ? var.blockvolumes : {} - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = one(each.value.attach_to_instance) state = "RUNNING" } @@ -18,10 +18,10 @@ module "block-volumes" { source = "./modules/storage/block-volume" for_each = var.blockvolumes != null ? var.blockvolumes : {} attachment_type = each.value.attachment_type - attach_to_instance = each.value.attach_to_instance != null ? length(regexall("ocid1.instance.oc1*", each.value.attach_to_instance)) > 0 ? each.value.attach_to_instance : merge(module.instances.*...)[each.value.attach_to_instance]["instance_tf_id"] : null + attach_to_instance = each.value.attach_to_instance != null ? length(regexall("ocid1.instance.oc*", each.value.attach_to_instance)) > 0 ? 
each.value.attach_to_instance : merge(module.instances.*...)[each.value.attach_to_instance]["instance_tf_id"] : null #attach_to_instance = length(each.value.attach_to_instance) > 0 ? [data.oci_core_instances.instance[each.value.display_name].instances[0].id] : [] availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : null - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null vpus_per_gb = each.value.vpus_per_gb != null ? each.value.vpus_per_gb : null device = each.value.device defined_tags = each.value.defined_tags @@ -31,7 +31,7 @@ module "block-volumes" { kms_key_id = each.value.kms_key_id size_in_gbs = each.value.size_in_gbs != null ? each.value.size_in_gbs : null block_tf_policy = each.value.backup_policy != null ? each.value.backup_policy : null - policy_tf_compartment_id = each.value.policy_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.policy_compartment_id)) > 0 ? each.value.policy_compartment_id : var.compartment_ocids[each.value.policy_compartment_id]) : null + policy_tf_compartment_id = each.value.policy_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.policy_compartment_id)) > 0 ? each.value.policy_compartment_id : var.compartment_ocids[each.value.policy_compartment_id]) : null #Volume Attachment Optional Params diff --git a/cd3_automation_toolkit/user-scripts/terraform/budget.tf b/cd3_automation_toolkit/user-scripts/terraform/budget.tf index d6d83c0e0..9a3da67a2 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/budget.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/budget.tf @@ -11,7 +11,7 @@ module "budgets" { #Required amount = each.value.amount - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid reset_period = each.value.reset_period != null ? each.value.reset_period : "MONTHLY" #Optional @@ -21,7 +21,7 @@ module "budgets" { defined_tags = each.value.defined_tags freeform_tags = each.value.freeform_tags processing_period_type = each.value.processing_period_type - #target_compartment_id = each.value.target_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.target_compartment_id)) > 0 ? each.value.target_compartment_id : var.compartment_ocids[each.value.target_compartment_id]) : null + #target_compartment_id = each.value.target_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.target_compartment_id)) > 0 ? each.value.target_compartment_id : var.compartment_ocids[each.value.target_compartment_id]) : null target_type = each.value.target_type targets = each.value.targets != null ? 
[var.compartment_ocids[flatten([for targets in each.value.targets : targets])[0]]] : [] @@ -32,7 +32,7 @@ module "budget-alert-rules" { for_each = var.budget_alert_rules != null ? var.budget_alert_rules : {} #Required - budget_id = each.value.budget_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.budget_id)) > 0 ? each.value.budget_id : merge(module.budgets.*...)[each.value.budget_id]["budget_tf_id"]) : null + budget_id = each.value.budget_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.budget_id)) > 0 ? each.value.budget_id : merge(module.budgets.*...)[each.value.budget_id]["budget_tf_id"]) : null threshold = each.value.threshold threshold_type = each.value.threshold_type type = each.value.type diff --git a/cd3_automation_toolkit/user-scripts/terraform/cloud-guard.tf b/cd3_automation_toolkit/user-scripts/terraform/cloud-guard.tf index 8f9a368f3..352a087c7 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/cloud-guard.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/cloud-guard.tf @@ -10,7 +10,7 @@ module "cloud-guard-configurations" { for_each = var.cloud_guard_configs != null ? var.cloud_guard_configs : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid reporting_region = each.value.reporting_region status = each.value.status @@ -25,9 +25,9 @@ module "cloud-guard-targets" { depends_on = [module.cloud-guard-configurations] #Required tenancy_ocid = var.tenancy_ocid - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid display_name = each.value.display_name - target_resource_id = each.value.target_resource_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.target_resource_id)) > 0 ? each.value.target_resource_id : var.compartment_ocids[each.value.target_resource_id]) : each.value.target_resource_id + target_resource_id = each.value.target_resource_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.target_resource_id)) > 0 ? each.value.target_resource_id : var.compartment_ocids[each.value.target_resource_id]) : each.value.target_resource_id target_resource_type = each.value.target_resource_type != null ? each.value.target_resource_type : "COMPARTMENT" prefix = each.value.prefix diff --git a/cd3_automation_toolkit/user-scripts/terraform/database-exacs.tf b/cd3_automation_toolkit/user-scripts/terraform/database-exacs.tf index 0c2f381ec..25a12d6b8 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/database-exacs.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/database-exacs.tf @@ -8,7 +8,7 @@ data "oci_core_subnets" "oci_exacs_subnets" { # depends_on = [module.subnets] # Uncomment to create Network and Instances together for_each = var.exa_vmclusters != null ? 
var.exa_vmclusters : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.cluster_subnet_id vcn_id = data.oci_core_vcns.oci_exacs_vcns[each.key].virtual_networks.*.id[0] } @@ -16,7 +16,7 @@ data "oci_core_subnets" "oci_exacs_subnets" { data "oci_core_subnets" "oci_exacs_backup_subnets" { # depends_on = [module.subnets] # Uncomment to create Network and Instances together for_each = var.exa_vmclusters != null ? var.exa_vmclusters : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.backup_subnet_id vcn_id = data.oci_core_vcns.oci_exacs_vcns[each.key].virtual_networks.*.id[0] } @@ -24,7 +24,7 @@ data "oci_core_subnets" "oci_exacs_backup_subnets" { data "oci_core_vcns" "oci_exacs_vcns" { # depends_on = [module.vcns] # Uncomment to create Network and Instances together for_each = var.exa_vmclusters != null ? var.exa_vmclusters : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } @@ -32,7 +32,7 @@ module "exa-infra" { source = "./modules/database/exa-infra" for_each = var.exa_infra != null ? var.exa_infra : {} availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name shape = each.value.shape compute_count = each.value.compute_count @@ -57,23 +57,23 @@ module "exa-infra" { ############################################ module "exa-vmclusters" { - depends_on = [module.exa-infra] + depends_on = [module.exa-infra, module.nsgs] source = "./modules/database/exa-vmcluster" for_each = var.exa_vmclusters != null ? var.exa_vmclusters : {} - backup_subnet_id = each.value.backup_subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.backup_subnet_id)) > 0 ? each.value.backup_subnet_id : data.oci_core_subnets.oci_exacs_backup_subnets[each.key].subnets.*.id[0]) : null - exadata_infrastructure_id = length(regexall("ocid1.cloudexadatainfrastructure.oc1*", each.value.exadata_infrastructure_id)) > 0 ? each.value.exadata_infrastructure_id : merge(module.exa-infra.*...)[each.value.exadata_infrastructure_id].exainfra_tf_id + backup_subnet_id = each.value.backup_subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.backup_subnet_id)) > 0 ? each.value.backup_subnet_id : data.oci_core_subnets.oci_exacs_backup_subnets[each.key].subnets.*.id[0]) : null + exadata_infrastructure_id = length(regexall("ocid1.cloudexadatainfrastructure.oc*", each.value.exadata_infrastructure_id)) > 0 ? each.value.exadata_infrastructure_id : merge(module.exa-infra.*...)[each.value.exadata_infrastructure_id].exainfra_tf_id cpu_core_count = each.value.cpu_core_count display_name = each.value.display_name - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null gi_version = each.value.gi_version hostname = each.value.hostname #ssh_public_keys = length(regexall("ssh-rsa*",each.value.ssh_public_key)) > 0 ? each.value.ssh_public_key : var.ssh_public_key ssh_public_keys = lookup(var.exacs_ssh_keys, each.value.ssh_public_keys, var.exacs_ssh_keys["ssh_public_key"]) - // cluster_subnet_id = length(regexall("ocid1.subnet.oc1*", each.value.cluster_subnet_id)) > 0 ? each.value.cluster_subnet_id : merge(module.subnets.*...)[each.value.cluster_subnet_id]["subnet_tf_id"] - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + // cluster_subnet_id = length(regexall("ocid1.subnet.oc*", each.value.cluster_subnet_id)) > 0 ? each.value.cluster_subnet_id : merge(module.subnets.*...)[each.value.cluster_subnet_id]["subnet_tf_id"] + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null vcn_names = [each.value.vcn_name] - cluster_subnet_id = each.value.cluster_subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.cluster_subnet_id)) > 0 ? each.value.cluster_subnet_id : data.oci_core_subnets.oci_exacs_subnets[each.key].subnets.*.id[0]) : null + cluster_subnet_id = each.value.cluster_subnet_id != "" ? 
(length(regexall("ocid1.subnet.oc*", each.value.cluster_subnet_id)) > 0 ? each.value.cluster_subnet_id : data.oci_core_subnets.oci_exacs_subnets[each.key].subnets.*.id[0]) : null backup_network_nsg_ids = each.value.backup_network_nsg_ids != null ? each.value.backup_network_nsg_ids : [] cluster_name = each.value.cluster_name data_storage_percentage = each.value.data_storage_percentage @@ -87,7 +87,7 @@ module "exa-vmclusters" { is_local_backup_enabled = each.value.is_local_backup_enabled is_sparse_diskgroup_enabled = each.value.is_sparse_diskgroup_enabled license_model = each.value.license_model - // nsg_ids = each.value.nsg_ids != null ? [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] : null + // nsg_ids = each.value.nsg_ids != null ? [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] : null nsg_ids = each.value.nsg_ids != null ? each.value.nsg_ids : [] ocpu_count = each.value.ocpu_count scan_listener_port_tcp = each.value.scan_listener_port_tcp diff --git a/cd3_automation_toolkit/user-scripts/terraform/dbsystem-vm-bm.tf b/cd3_automation_toolkit/user-scripts/terraform/dbsystem-vm-bm.tf index 1573c85e7..8cffe73e2 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/dbsystem-vm-bm.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/dbsystem-vm-bm.tf @@ -7,7 +7,7 @@ data "oci_core_subnets" "oci_dbsystems_subnets" { # depends_on = [module.subnets] # Uncomment to create Network and Instances together for_each = var.dbsystems_vm_bm != null ? var.dbsystems_vm_bm : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_id vcn_id = data.oci_core_vcns.oci_dbsystems_vcns[each.key].virtual_networks.*.id[0] } @@ -15,16 +15,16 @@ data "oci_core_subnets" "oci_dbsystems_subnets" { data "oci_core_vcns" "oci_dbsystems_vcns" { # depends_on = [module.vcns] # Uncomment to create Network and Instances together for_each = var.dbsystems_vm_bm != null ? var.dbsystems_vm_bm : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } module "dbsystems-vm-bm" { source = "./modules/database/dbsystem-vm-bm" - + # depends_on = [module.nsgs] # Uncomment to create NSG and DB Systems together for_each = var.dbsystems_vm_bm != null ? 
var.dbsystems_vm_bm : {} availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null hostname = each.value.hostname display_name = each.value.display_name db_version = each.value.db_version @@ -32,9 +32,9 @@ module "dbsystems-vm-bm" { shape = each.value.shape #ssh_public_key = length(regexall("ssh-rsa*",each.value.ssh_public_key)) > 0 ? each.value.ssh_public_key : var.ssh_public_key ssh_public_keys = lookup(var.dbsystem_ssh_keys, each.value.ssh_public_keys, var.dbsystem_ssh_keys["ssh_public_key"]) - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null vcn_names = [each.value.vcn_name] - subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_dbsystems_subnets[each.key].subnets.*.id[0]) : null + subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_dbsystems_subnets[each.key].subnets.*.id[0]) : null node_count = each.value.node_count nsg_ids = each.value.nsg_ids != null ? each.value.nsg_ids : [] diff --git a/cd3_automation_toolkit/user-scripts/terraform/dedicated-vm-host.tf b/cd3_automation_toolkit/user-scripts/terraform/dedicated-vm-host.tf index 4e88ac0ae..74eb287de 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/dedicated-vm-host.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/dedicated-vm-host.tf @@ -10,7 +10,7 @@ module "dedicated-hosts" { for_each = var.dedicated_hosts != null ? var.dedicated_hosts : {} availability_domain = each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : null - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null defined_tags = each.value.defined_tags freeform_tags = each.value.freeform_tags vm_host_shape = each.value.vm_host_shape diff --git a/cd3_automation_toolkit/user-scripts/terraform/dns.tf b/cd3_automation_toolkit/user-scripts/terraform/dns.tf index caa513c18..4175ac5fd 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/dns.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/dns.tf @@ -11,7 +11,7 @@ data "oci_core_vcn_dns_resolver_association" "resolver_vcn_dns_resolver_associat data "oci_core_vcns" "resolver_oci_vcns" { # depends_on = [module.vcns] # Uncomment to create resolver and vcn together for_each = var.resolvers != null ? var.resolvers : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } @@ -35,7 +35,7 @@ locals { data "oci_core_subnets" "resolver_oci_subnets" { # depends_on = [module.subnets] # Uncomment to create resolver and subnets together for_each = { for sn in local.subnets : "${sn.endpoint_name}_${sn.subnet_name}" => sn } - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_name vcn_id = data.oci_core_vcns.resolver_oci_vcns[each.value.resolver_key].virtual_networks.*.id[0] } @@ -58,7 +58,7 @@ locals { } data "oci_core_network_security_groups" "resolver_network_security_groups" { for_each = { for nsg in local.nsgs : "${nsg.endpoint_name}_${nsg.nsg_name}" => nsg } - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.nsg_name vcn_id = data.oci_core_vcns.resolver_oci_vcns[each.value.resolver_key].virtual_networks.*.id[0] } @@ -79,7 +79,7 @@ locals { data "oci_dns_views" "resolver_views_data" { #Required for_each = { for rv in local.resolver_views : "${rv.view_key}" => rv } - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.view_compartment)) > 0 ? each.value.view_compartment : var.compartment_ocids[each.value.view_compartment] + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.view_compartment)) > 0 ? 
each.value.view_compartment : var.compartment_ocids[each.value.view_compartment] scope = "PRIVATE" #Optional display_name = each.value.view_name @@ -89,13 +89,14 @@ data "oci_dns_views" "resolver_views_data" { ### Module ### module "dns-resolvers" { source = "./modules/network/dns/dns_resolver" + # depends_on = [module.nsgs] # Uncomment to create NSG and DNS Resolvers together for_each = var.resolvers != null ? var.resolvers : {} target_resolver_id = data.oci_core_vcn_dns_resolver_association.resolver_vcn_dns_resolver_association[each.key].*.dns_resolver_id[0] resolver_scope = "PRIVATE" resolver_display_name = each.value.display_name != null ? each.value.display_name : null views = each.value.views != null ? { for v_key, view in each.value.views : v_key => { - view_id = length(regexall("ocid1.dnsview.oc1*", view.view_id)) > 0 ? view.view_id : try(data.oci_dns_views.resolver_views_data["${v_key}"].views.*.id[0], module.dns-views[view.view_id]["dns_view_id"]) + view_id = length(regexall("ocid1.dnsview.oc*", view.view_id)) > 0 ? view.view_id : try(data.oci_dns_views.resolver_views_data["${v_key}"].views.*.id[0], module.dns-views[view.view_id]["dns_view_id"]) } } : null @@ -108,14 +109,14 @@ module "dns-resolvers" { listening = endpoint.is_listening name = endpoint.name #resolver_id = oci_dns_resolver.test_resolver.id - subnet_id = length(regexall("ocid1.subnet.oc1*", endpoint.subnet_name)) > 0 ? endpoint.subnet_name : data.oci_core_subnets.resolver_oci_subnets["${endpoint.name}_${endpoint.subnet_name}"].subnets.*.id[0] + subnet_id = length(regexall("ocid1.subnet.oc*", endpoint.subnet_name)) > 0 ? endpoint.subnet_name : data.oci_core_subnets.resolver_oci_subnets["${endpoint.name}_${endpoint.subnet_name}"].subnets.*.id[0] scope = "PRIVATE" #Optional endpoint_type = "VNIC" forwarding_address = endpoint.forwarding_address listening_address = endpoint.listening_address - nsg_ids = endpoint.nsg_ids != null ? flatten(tolist([for nsg in endpoint.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.resolver_network_security_groups["${endpoint.name}_${nsg}"].network_security_groups[*].id)])) : null + nsg_ids = endpoint.nsg_ids != null ? flatten(tolist([for nsg in endpoint.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.resolver_network_security_groups["${endpoint.name}_${nsg}"].network_security_groups[*].id)])) : null } } : null @@ -128,7 +129,7 @@ module "dns-resolvers" { data "oci_dns_views" "rrset_views_data" { #Required for_each = var.rrsets - compartment_id = each.value.view_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.view_compartment_id)) > 0 ? each.value.view_compartment_id : var.compartment_ocids[each.value.view_compartment_id]) : null + compartment_id = each.value.view_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.view_compartment_id)) > 0 ? each.value.view_compartment_id : var.compartment_ocids[each.value.view_compartment_id]) : null scope = "PRIVATE" #Optional @@ -138,13 +139,13 @@ data "oci_dns_views" "rrset_views_data" { data "oci_dns_zones" "rrset_zones_data" { for_each = { for k, v in var.rrsets : k => v if try(data.oci_dns_views.rrset_views_data[k].views.*.id[0], 0) != 0 } - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null #Optional name = each.value.zone_id scope = "PRIVATE" state = "ACTIVE" - view_id = length(regexall("ocid1.dnsview.oc1*", each.value.view_id)) > 0 ? each.value.view_id : try(data.oci_dns_views.rrset_views_data[each.key].views.*.id[0], module.dns-views[each.value.view_id]["dns_view_id"]) + view_id = length(regexall("ocid1.dnsview.oc*", each.value.view_id)) > 0 ? each.value.view_id : try(data.oci_dns_views.rrset_views_data[each.key].views.*.id[0], module.dns-views[each.value.view_id]["dns_view_id"]) } module "dns-rrsets" { @@ -152,12 +153,12 @@ module "dns-rrsets" { for_each = var.rrsets != null ? var.rrsets : {} depends_on = [module.dns-views, module.dns-zones] rrset_zone = try(data.oci_dns_zones.rrset_zones_data[each.key].zones.*.id[0], module.dns-zones[join("_", [each.value.view_id, replace(each.value.zone_id, ".", "_")])]["dns_zone_id"]) - #rrset_view_id = each.value.view_id != "" ? (length(regexall("ocid1.dnsview.oc1*", each.value.view_id)) > 0 ? each.value.view_id : data.oci_dns_views.rrset_views_data[each.key].views.*.id[0]) : null - rrset_view_id = length(regexall("ocid1.dnsview.oc1*", each.value.view_id)) > 0 ? each.value.view_id : try(data.oci_dns_views.rrset_views_data[each.key].views.*.id[0], module.dns-views[each.value.view_id]["dns_view_id"]) + #rrset_view_id = each.value.view_id != "" ? (length(regexall("ocid1.dnsview.oc*", each.value.view_id)) > 0 ? each.value.view_id : data.oci_dns_views.rrset_views_data[each.key].views.*.id[0]) : null + rrset_view_id = length(regexall("ocid1.dnsview.oc*", each.value.view_id)) > 0 ? each.value.view_id : try(data.oci_dns_views.rrset_views_data[each.key].views.*.id[0], module.dns-views[each.value.view_id]["dns_view_id"]) rrset_domain = each.value.domain rrset_rtype = each.value.rtype rrset_ttl = each.value.ttl - #rrset_compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + #rrset_compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null rrset_rdata = each.value.rdata rrset_scope = "PRIVATE" @@ -176,7 +177,7 @@ module "dns-rrsets" { data "oci_dns_views" "zone_views_data" { #Required for_each = { for k, v in var.zones : k => v if v.view_id != null } - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.view_compartment_id)) > 0 ? each.value.view_compartment_id : var.compartment_ocids[each.value.view_compartment_id] + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.view_compartment_id)) > 0 ? each.value.view_compartment_id : var.compartment_ocids[each.value.view_compartment_id] scope = "PRIVATE" display_name = each.value.view_id state = "ACTIVE" @@ -186,14 +187,14 @@ module "dns-zones" { source = "./modules/network/dns/zone" depends_on = [module.dns-views] for_each = { for k, v in var.zones : k => v if var.zones != null } - zone_compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id] + zone_compartment_id = length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id] zone_name = each.value.display_name zone_type = "PRIMARY" zone_defined_tags = try(each.value.defined_tags, null) zone_freeform_tags = try(each.value.freeform_tags, null) #external_masters = each.value.external_masters != null ? each.value.external_masters : {} zone_scope = "PRIVATE" - view_id = length(regexall("ocid1.dnsview.oc1*", each.value.view_id)) > 0 ? each.value.view_id : try(data.oci_dns_views.zone_views_data[each.key].views.*.id[0], module.dns-views[each.value.view_id]["dns_view_id"]) + view_id = length(regexall("ocid1.dnsview.oc*", each.value.view_id)) > 0 ? each.value.view_id : try(data.oci_dns_views.zone_views_data[each.key].views.*.id[0], module.dns-views[each.value.view_id]["dns_view_id"]) } ################# @@ -203,7 +204,7 @@ module "dns-zones" { module "dns-views" { source = "./modules/network/dns/view" for_each = var.views != null ? var.views : {} - view_compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + view_compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null view_display_name = each.value.display_name view_scope = try((each.value.scope != null ? (each.value.scope == "PRIVATE" ? each.value.scope : null) : null), null) view_defined_tags = try(each.value.defined_tags, null) diff --git a/cd3_automation_toolkit/user-scripts/terraform/firewall.tf b/cd3_automation_toolkit/user-scripts/terraform/firewall.tf index cee81ada4..21e8d44d5 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/firewall.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/firewall.tf @@ -1,167 +1,167 @@ data "oci_core_vcns" "firewall_vcns" { - for_each = var.firewalls != null ? var.firewalls : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] - display_name = each.value.vcn_name + for_each = var.firewalls != null ? var.firewalls : {} + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + display_name = each.value.vcn_name } data "oci_core_subnets" "firewall_subnets" { - for_each = var.firewalls != null ? var.firewalls : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] - display_name = each.value.subnet_id - vcn_id = data.oci_core_vcns.firewall_vcns[each.key].virtual_networks.*.id[0] + for_each = var.firewalls != null ? 
var.firewalls : {} + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + display_name = each.value.subnet_id + vcn_id = data.oci_core_vcns.firewall_vcns[each.key].virtual_networks.*.id[0] } module "firewalls" { - source = "./modules/security/firewall/firewall" - for_each = var.firewalls != null ? var.firewalls :{} - depends_on = [module.policies,module.address_lists, module.application_groups, module.applications, module.services, module.service_lists, module.url_lists,module.decryption_profiles, module.secrets,module.security_rules,module.decryption_rules] - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.compartment_ocids[each.value.compartment_id] - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.firewall_subnets[each.key].subnets.*.id[0]) : null - display_name = each.value.display_name - ipv4address = each.value.ipv4address - ipv6address = each.value.ipv6address - availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" - nsg_id = each.value.nsg_id - vcn_name = each.value.vcn_name - defined_tags = each.value.defined_tags - freeform_tags = each.value.freeform_tags + source = "./modules/security/firewall/firewall" + for_each = var.firewalls != null ? var.firewalls : {} + depends_on = [module.policies, module.address_lists, module.application_groups, module.applications, module.services, module.service_lists, module.url_lists, module.decryption_profiles, module.secrets, module.security_rules, module.decryption_rules] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.compartment_ocids[each.value.compartment_id] + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.firewall_subnets[each.key].subnets.*.id[0]) : null + display_name = each.value.display_name + ipv4address = each.value.ipv4address + ipv6address = each.value.ipv6address + availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? 
data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" + nsg_id = each.value.nsg_id + vcn_name = each.value.vcn_name + defined_tags = each.value.defined_tags + freeform_tags = each.value.freeform_tags } module "policies" { - source = "./modules/security/firewall/firewall-policy" - for_each = var.fw-policies != null ? var.fw-policies :{} - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.compartment_ocids[each.value.compartment_id] + source = "./modules/security/firewall/firewall-policy" + for_each = var.fw-policies != null ? var.fw-policies : {} + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.compartment_ocids[each.value.compartment_id] display_name = each.value.display_name - defined_tags = each.value.defined_tags - freeform_tags = each.value.freeform_tags - } + defined_tags = each.value.defined_tags + freeform_tags = each.value.freeform_tags +} module "services" { - source = "./modules/security/firewall/service" - for_each = var.services != null ? var.services :{} - depends_on = [module.policies] - service_name = each.value.service_name - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - service_type = each.value.service_type - port_ranges = each.value.port_ranges + source = "./modules/security/firewall/service" + for_each = var.services != null ? var.services : {} + depends_on = [module.policies] + service_name = each.value.service_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + service_type = each.value.service_type + port_ranges = each.value.port_ranges } module "service_lists" { source = "./modules/security/firewall/service-list" for_each = var.service_lists != null ? var.service_lists : {} - depends_on = [module.services,module.policies] + depends_on = [module.services, module.policies] service_list_name = each.value.service_list_name network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - services = each.value.services != null ? flatten(tolist([for sid in each.value.services : (length(regexall("ocid1.networkfirewallpolicy.oc1*", sid)) > 0 ? merge(module.services.*...)[sid]["service+_tf_id"] :[sid] )])) : null + services = each.value.services != null ? flatten(tolist([for sid in each.value.services : (length(regexall("ocid1.networkfirewallpolicy.oc*", sid)) > 0 ? merge(module.services.*...)[sid]["service+_tf_id"] : [sid])])) : null } -module address_lists { +module "address_lists" { source = "./modules/security/firewall/address-list" for_each = var.address_lists != null ? 
var.address_lists : {} - depends_on = [module.policies] - address_list_name = each.value.address_list_name + depends_on = [module.policies] + address_list_name = each.value.address_list_name network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - address_type = each.value.address_type - addresses = each.value.addresses + address_type = each.value.address_type + addresses = each.value.addresses } -module applications { +module "applications" { source = "./modules/security/firewall/application" for_each = var.applications != null ? var.applications : {} - depends_on = [module.policies] - icmp_type = each.value.icmp_type - app_list_name = each.value.app_list_name + depends_on = [module.policies] + icmp_type = each.value.icmp_type + app_list_name = each.value.app_list_name network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - app_type = each.value.app_type - icmp_code = each.value.icmp_code + app_type = each.value.app_type + icmp_code = each.value.icmp_code } -module application_groups { +module "application_groups" { source = "./modules/security/firewall/application-group" for_each = var.application_groups != null ? var.application_groups : {} - depends_on = [module.policies,module.applications] - app_group_name = each.value.app_group_name + depends_on = [module.policies, module.applications] + app_group_name = each.value.app_group_name network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - apps = each.value.apps != null ? flatten(tolist([for app in each.value.apps : (length(regexall("ocid1.networkfirewallpolicy.oc1*", app)) > 0 ? merge(module.applications.*...)[app]["application_tf_id"] :[app] )])) : null + apps = each.value.apps != null ? flatten(tolist([for app in each.value.apps : (length(regexall("ocid1.networkfirewallpolicy.oc*", app)) > 0 ? merge(module.applications.*...)[app]["application_tf_id"] : [app])])) : null } -module url_lists { +module "url_lists" { source = "./modules/security/firewall/url-list" for_each = var.url_lists != null ? var.url_lists : {} - depends_on = [module.policies] - urllist_name = each.value.urllist_name - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + depends_on = [module.policies] + urllist_name = each.value.urllist_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] #key_name = each.key urls_details = each.value.urls } -module security_rules { +module "security_rules" { source = "./modules/security/firewall/security-rules" for_each = var.security_rules != null ? 
var.security_rules : {} - depends_on = [module.policies, module.address_lists, module.application_groups, module.applications, module.services, module.service_lists, module.url_lists] - action = each.value.action - rule_name = each.value.rule_name - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - application = each.value.condition[0].application != null ? each.value.condition[0].application : [] - url = each.value.condition[0].url != null ? each.value.condition[0].url : [] - service = each.value.condition[0].service != null ? each.value.condition[0].service : [] - source_address = each.value.condition[0].source_address != null ? each.value.condition[0].source_address : [] - destination_address = each.value.condition[0].destination_address != null ? each.value.condition[0].destination_address : [] + depends_on = [module.policies, module.address_lists, module.application_groups, module.applications, module.services, module.service_lists, module.url_lists] + action = each.value.action + rule_name = each.value.rule_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + application = each.value.condition[0].application != null ? each.value.condition[0].application : [] + url = each.value.condition[0].url != null ? each.value.condition[0].url : [] + service = each.value.condition[0].service != null ? each.value.condition[0].service : [] + source_address = each.value.condition[0].source_address != null ? each.value.condition[0].source_address : [] + destination_address = each.value.condition[0].destination_address != null ? each.value.condition[0].destination_address : [] /*application = each.value.condition != null ? each.value.condition.application : [] url = each.value.condition != null ? each.value.condition.url : [] service = each.value.condition != null ? each.value.condition.service : [] source_address = each.value.condition != null ? each.value.condition.source_address : [] destination_address = each.value.condition != null ? each.value.condition.destination_address : []*/ - inspection = each.value.inspection - after_rule = each.value.after_rule + inspection = each.value.inspection + after_rule = each.value.after_rule before_rule = each.value.before_rule } -module secrets { - source = "./modules/security/firewall/secret" - for_each = var.secrets != null || var.secrets != {} ? var.secrets : {} - depends_on = [module.policies] - secret_name = each.value.secret_name - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - secret_source = each.value.secret_source - secret_type = each.value.secret_type - vault_secret_id = each.value.vault_secret_id - vault_name = each.value.vault_name - compartment_id = each.value.vault_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.vault_compartment_id)) > 0 ? 
each.value.vault_compartment_id : var.compartment_ocids[each.value.vault_compartment_id]) : var.compartment_ocids[each.value.vault_compartment_id] - version_number = each.value.version_number +module "secrets" { + source = "./modules/security/firewall/secret" + for_each = var.secrets != null || var.secrets != {} ? var.secrets : {} + depends_on = [module.policies] + secret_name = each.value.secret_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + secret_source = each.value.secret_source + secret_type = each.value.secret_type + vault_secret_id = each.value.vault_secret_id + vault_name = each.value.vault_name + compartment_id = each.value.vault_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.vault_compartment_id)) > 0 ? each.value.vault_compartment_id : var.compartment_ocids[each.value.vault_compartment_id]) : var.compartment_ocids[each.value.vault_compartment_id] + version_number = each.value.version_number } -module decryption_profiles { - source = "./modules/security/firewall/decryption-profile" - for_each = var.decryption_profiles != null || var.decryption_profiles != {} ? var.decryption_profiles : {} - depends_on = [module.policies, module.secrets] - profile_name = each.value.profile_name - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - profile_type = each.value.profile_type +module "decryption_profiles" { + source = "./modules/security/firewall/decryption-profile" + for_each = var.decryption_profiles != null || var.decryption_profiles != {} ? var.decryption_profiles : {} + depends_on = [module.policies, module.secrets] + profile_name = each.value.profile_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? 
each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + profile_type = each.value.profile_type are_certificate_extensions_restricted = each.value.are_certificate_extensions_restricted - is_auto_include_alt_name = each.value.is_auto_include_alt_name - is_expired_certificate_blocked = each.value.is_expired_certificate_blocked - is_out_of_capacity_blocked =each.value.is_out_of_capacity_blocked - is_revocation_status_timeout_blocked = each.value.is_revocation_status_timeout_blocked - is_unknown_revocation_status_blocked = each.value.is_unknown_revocation_status_blocked - is_unsupported_cipher_blocked = each.value.is_unsupported_cipher_blocked - is_unsupported_version_blocked = each.value.is_unsupported_version_blocked - is_untrusted_issuer_blocked = each.value.is_untrusted_issuer_blocked + is_auto_include_alt_name = each.value.is_auto_include_alt_name + is_expired_certificate_blocked = each.value.is_expired_certificate_blocked + is_out_of_capacity_blocked = each.value.is_out_of_capacity_blocked + is_revocation_status_timeout_blocked = each.value.is_revocation_status_timeout_blocked + is_unknown_revocation_status_blocked = each.value.is_unknown_revocation_status_blocked + is_unsupported_cipher_blocked = each.value.is_unsupported_cipher_blocked + is_unsupported_version_blocked = each.value.is_unsupported_version_blocked + is_untrusted_issuer_blocked = each.value.is_untrusted_issuer_blocked } -module decryption_rules { +module "decryption_rules" { source = "./modules/security/firewall/decryption-rules" for_each = var.decryption_rules != null ? var.decryption_rules : {} - depends_on = [module.policies, module.decryption_profiles, module.secrets, module.address_lists] - action = each.value.action - rule_name = each.value.rule_name - network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*",each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] - source_address = each.value.condition[0].source_address != null ? each.value.condition[0].source_address : [] - destination_address = each.value.condition[0].destination_address != null ? each.value.condition[0].destination_address : [] - after_rule = each.value.after_rule - before_rule = each.value.before_rule - decryption_profile = each.value.decryption_profile - secret = each.value.secret + depends_on = [module.policies, module.decryption_profiles, module.secrets, module.address_lists] + action = each.value.action + rule_name = each.value.rule_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + source_address = each.value.condition[0].source_address != null ? each.value.condition[0].source_address : [] + destination_address = each.value.condition[0].destination_address != null ? 
each.value.condition[0].destination_address : [] + after_rule = each.value.after_rule + before_rule = each.value.before_rule + decryption_profile = each.value.decryption_profile + secret = each.value.secret } diff --git a/cd3_automation_toolkit/user-scripts/terraform/fss.tf b/cd3_automation_toolkit/user-scripts/terraform/fss.tf index eee47fe20..65d54a225 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/fss.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/fss.tf @@ -8,7 +8,7 @@ data "oci_core_subnets" "oci_subnets_fss" { # depends_on = [module.subnets] # Uncomment to create Network and FSS together for_each = var.mount_targets != null ? var.mount_targets : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_id vcn_id = data.oci_core_vcns.oci_vcns_fss[each.key].virtual_networks.*.id[0] } @@ -16,20 +16,20 @@ data "oci_core_subnets" "oci_subnets_fss" { data "oci_core_vcns" "oci_vcns_fss" { # depends_on = [module.vcns] # Uncomment to create Network and FSS together for_each = var.mount_targets != null ? var.mount_targets : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } module "mts" { - # depends_on = [module.vcns, module.subnets] # Uncomment to execute Networking and Mount Target together + # depends_on = [module.nsgs]] # Uncomment to execute NSG and Mount Target together #Required source = "./modules/storage/file-storage/mount-target" for_each = (var.mount_targets != null || var.mount_targets != {}) ? var.mount_targets : {} #Required availability_domain = each.value.availability_domain != null && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : null - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] - subnet_id = length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? 
each.value.subnet_id : data.oci_core_subnets.oci_subnets_fss[each.key].subnets.*.id[0] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + subnet_id = length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_fss[each.key].subnets.*.id[0] vcn_names = [each.value.vcn_name] #Optional @@ -38,8 +38,8 @@ module "mts" { freeform_tags = each.value.freeform_tags hostname_label = each.value.hostname_label ip_address = each.value.ip_address - #nsg_ids = [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc1*",nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] - #nsg_ids = each.value.nsg_ids == [] ? null : ([for nsg in each.value.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*",nsg)) > 0 ? nsg : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)]) + #nsg_ids = [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc*",nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] + #nsg_ids = each.value.nsg_ids == [] ? null : ([for nsg in each.value.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*",nsg)) > 0 ? nsg : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)]) network_security_group_ids = each.value.nsg_ids } @@ -51,7 +51,7 @@ module "fss" { #Required availability_domain = each.value.availability_domain != null && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : null - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null #Optional defined_tags = each.value.defined_tags @@ -67,8 +67,8 @@ module "fss-export-options" { for_each = (var.nfs_export_options != null || var.nfs_export_options != {}) ? var.nfs_export_options : {} #Required - export_set_id = length(regexall("ocid1.mounttarget.oc1*", each.value.export_set_id)) > 0 ? each.value.export_set_id : merge(module.mts.*...)[each.value.export_set_id]["mt_exp_set_id"] - file_system_id = length(regexall("ocid1.filesystem.oc1*", each.value.file_system_id)) > 0 ? each.value.file_system_id : merge(module.fss.*...)[each.value.file_system_id]["fss_tf_id"] + export_set_id = length(regexall("ocid1.mounttarget.oc*", each.value.export_set_id)) > 0 ? each.value.export_set_id : merge(module.mts.*...)[each.value.export_set_id]["mt_exp_set_id"] + file_system_id = length(regexall("ocid1.filesystem.oc*", each.value.file_system_id)) > 0 ? 
each.value.file_system_id : merge(module.fss.*...)[each.value.file_system_id]["fss_tf_id"] export_path = each.value.path nfs_export_options = var.nfs_export_options key_name = each.key diff --git a/cd3_automation_toolkit/user-scripts/terraform/identity.tf b/cd3_automation_toolkit/user-scripts/terraform/identity.tf index d810b0cf1..a83c307e9 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/identity.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/identity.tf @@ -11,7 +11,7 @@ module "iam-compartments" { # insert the 4 required variables here tenancy_ocid = var.tenancy_ocid - compartment_id = each.value.parent_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id])) : var.tenancy_ocid + compartment_id = each.value.parent_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id])) : var.tenancy_ocid compartment_name = each.value.name compartment_description = each.value.description enable_delete = each.value.enable_delete @@ -28,7 +28,7 @@ module "sub-compartments-level1" { depends_on = [module.iam-compartments] # insert the 4 required variables here tenancy_ocid = var.tenancy_ocid - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.iam-compartments.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.iam-compartments.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) compartment_name = each.value.name compartment_description = each.value.description enable_delete = each.value.enable_delete @@ -45,7 +45,7 @@ module "sub-compartments-level2" { depends_on = [module.sub-compartments-level1] # insert the 4 required variables here tenancy_ocid = var.tenancy_ocid - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.sub-compartments-level1.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.parent_compartment_id)) > 0 ? 
each.value.parent_compartment_id : try(merge(module.sub-compartments-level1.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) compartment_name = each.value.name compartment_description = each.value.description @@ -63,7 +63,7 @@ module "sub-compartments-level3" { depends_on = [module.sub-compartments-level2] # insert the 4 required variables here tenancy_ocid = var.tenancy_ocid - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.sub-compartments-level2.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.sub-compartments-level2.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) compartment_name = each.value.name compartment_description = each.value.description enable_delete = each.value.enable_delete @@ -80,7 +80,7 @@ module "sub-compartments-level4" { depends_on = [module.sub-compartments-level3] # insert the 4 required variables here tenancy_ocid = var.tenancy_ocid - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.sub-compartments-level3.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.sub-compartments-level3.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) compartment_name = each.value.name compartment_description = each.value.description enable_delete = each.value.enable_delete @@ -97,7 +97,7 @@ module "sub-compartments-level5" { depends_on = [module.sub-compartments-level4] # insert the 4 required variables here tenancy_ocid = var.tenancy_ocid - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.parent_compartment_id)) > 0 ? 
each.value.parent_compartment_id : try(merge(module.sub-compartments-level4.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.parent_compartment_id)) > 0 ? each.value.parent_compartment_id : try(merge(module.sub-compartments-level4.*...)[each.value.parent_compartment_id]["compartment_tf_id"], var.compartment_ocids[each.value.parent_compartment_id], zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.parent_compartment_id]) compartment_name = each.value.name compartment_description = each.value.description enable_delete = each.value.enable_delete @@ -194,7 +194,7 @@ module "iam-policies" { depends_on = [module.iam-groups] tenancy_ocid = var.tenancy_ocid policy_name = each.value.name - policy_compartment_id = each.value.compartment_id != "root" ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid + policy_compartment_id = each.value.compartment_id != "root" ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid policy_description = each.value.policy_description policy_statements = each.value.policy_statements @@ -223,7 +223,7 @@ module "iam-users" { user_description = each.value.description user_email = each.value.email group_membership = each.value.group_membership != null ? each.value.group_membership : null - #group_membership = each.value.group_membership != null ? length(regexall("ocid1.groupmembership.oc1*", each.value.group_membership.0)) > 0 ? each.value.group_membership.0 : merge(module.iam-groups.*...)[each.value.group_membership.0]["group_tf_id"] : null + #group_membership = each.value.group_membership != null ? length(regexall("ocid1.groupmembership.oc*", each.value.group_membership.0)) > 0 ? each.value.group_membership.0 : merge(module.iam-groups.*...)[each.value.group_membership.0]["group_tf_id"] : null tenancy_ocid = var.tenancy_ocid disable_capabilities = each.value.disable_capabilities != null ? each.value.disable_capabilities : null @@ -249,7 +249,7 @@ module "iam-users" { # network_compartment = virtual_source.network_compartment_id.0 # } # ] - #]) +#]) #} #data "oci_core_vcns" "oci_vcns_networksource" { @@ -260,21 +260,21 @@ module "iam-users" { #} module "iam-network-sources" { - source = "./modules/identity/iam-network-sources" - for_each = var.networkSources - name = each.value.name - description = each.value.description - tenancy_ocid = var.tenancy_ocid + source = "./modules/identity/iam-network-sources" + for_each = var.networkSources + name = each.value.name + description = each.value.description + tenancy_ocid = var.tenancy_ocid #Optional - public_source_list = each.value.public_source_list != null ? each.value.public_source_list : null + public_source_list = each.value.public_source_list != null ? each.value.public_source_list : null #virtual_source_list = each.value.virtual_source_list != null ? each.value.virtual_source_list : null - virtual_source_list = { for k,v in each.value.virtual_source_list != null ? 
each.value.virtual_source_list : [] : k => - { - #vcn_id = data.oci_core_vcns.oci_vcns_networksource[v.vcn_name.0].virtual_networks.*.id[0] - ip_ranges = v.ip_ranges - }} + virtual_source_list = { for k, v in each.value.virtual_source_list != null ? each.value.virtual_source_list : [] : k => + { + #vcn_id = data.oci_core_vcns.oci_vcns_networksource[v.vcn_name.0].virtual_networks.*.id[0] + ip_ranges = v.ip_ranges + } } #vcn_comp_map = each.value.vcn_comp_map != null ? each.value.vcn_comp_map : null - defined_tags = try (each.value.defined_tags, null) - freeform_tags = try (each.value.freeform_tags, null) + defined_tags = try(each.value.defined_tags, null) + freeform_tags = try(each.value.freeform_tags, null) } \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/instance.tf b/cd3_automation_toolkit/user-scripts/terraform/instance.tf index 26ed1f4e3..10b191ea0 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/instance.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/instance.tf @@ -8,7 +8,7 @@ data "oci_core_subnets" "oci_subnets" { # depends_on = [module.subnets] # Uncomment to create Network and Instances together for_each = var.instances != null ? var.instances : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_id vcn_id = data.oci_core_vcns.oci_vcns[each.key].virtual_networks.*.id[0] } @@ -16,16 +16,17 @@ data "oci_core_subnets" "oci_subnets" { data "oci_core_vcns" "oci_vcns" { # depends_on = [module.vcns] # Uncomment to create Network and Instances together for_each = var.instances != null ? var.instances : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } module "instances" { source = "./modules/compute/instance" + # depends_on = [module.nsgs] # Uncomment to create NSG and Instances together for_each = var.instances != null ? var.instances : {} availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - network_compartment_id = each.value.network_compartment_id != null ? 
(length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null vcn_names = [each.value.vcn_name] dedicated_vm_host_name = each.value.dedicated_vm_host_id != null ? each.value.dedicated_vm_host_id : null shape = each.value.shape @@ -36,45 +37,45 @@ module "instances" { fault_domain = each.value.fault_domain freeform_tags = each.value.freeform_tags source_type = each.value.source_type - source_image_id = length(regexall("ocid1.image.oc1*", each.value.source_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", each.value.source_id)) > 0 ? each.value.source_id : lookup(var.instance_source_ocids, each.value.source_id, null) - subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets[each.key].subnets.*.id[0]) : null + source_image_id = length(regexall("ocid1.image.oc*", each.value.source_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", each.value.source_id)) > 0 ? each.value.source_id : lookup(var.instance_source_ocids, each.value.source_id, null) + subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets[each.key].subnets.*.id[0]) : null assign_public_ip = each.value.assign_public_ip ssh_public_keys = each.value.ssh_authorized_keys != null ? (length(regexall("ssh-rsa*", each.value.ssh_authorized_keys)) > 0 ? each.value.ssh_authorized_keys : lookup(var.instance_ssh_keys, each.value.ssh_authorized_keys, null)) : null hostname_label = each.value.hostname_label nsg_ids = each.value.nsg_ids - #nsg_ids = each.value.nsg_ids != [] ? [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc1*",nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] : [] + #nsg_ids = each.value.nsg_ids != [] ? [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc*",nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] : [] boot_volume_size_in_gbs = each.value.boot_volume_size_in_gbs != null ? each.value.boot_volume_size_in_gbs : null memory_in_gbs = each.value.memory_in_gbs != null ? each.value.memory_in_gbs : null capacity_reservation_id = each.value.capacity_reservation_id != null ? lookup(var.capacity_reservation_ocids, each.value.capacity_reservation_id, null) : null create_is_pv_encryption_in_transit_enabled = each.value.create_is_pv_encryption_in_transit_enabled - boot_tf_policy = each.value.backup_policy != null ? each.value.backup_policy : null - policy_tf_compartment_id = each.value.policy_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.policy_compartment_id)) > 0 ? each.value.policy_compartment_id : var.compartment_ocids[each.value.policy_compartment_id]) : null - remote_execute = each.value.remote_execute != null ? each.value.remote_execute : null - bastion_ip = each.value.bastion_ip != null ? 
each.value.bastion_ip : null - cloud_init_script = each.value.cloud_init_script != null ? each.value.cloud_init_script : null - launch_options = each.value.launch_options - plugins_details = each.value.plugins_details - platform_config = each.value.platform_config != null ? each.value.platform_config : null + boot_tf_policy = each.value.backup_policy != null ? each.value.backup_policy : null + policy_tf_compartment_id = each.value.policy_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.policy_compartment_id)) > 0 ? each.value.policy_compartment_id : var.compartment_ocids[each.value.policy_compartment_id]) : null + remote_execute = each.value.remote_execute != null ? each.value.remote_execute : null + bastion_ip = each.value.bastion_ip != null ? each.value.bastion_ip : null + cloud_init_script = each.value.cloud_init_script != null ? each.value.cloud_init_script : null + launch_options = each.value.launch_options + plugins_details = each.value.plugins_details + platform_config = each.value.platform_config != null ? each.value.platform_config : null is_live_migration_preferred = each.value.is_live_migration_preferred # extended_metadata = each.value.extended_metadata - skip_source_dest_check = each.value.skip_source_dest_check != null ? each.value.skip_source_dest_check : null + skip_source_dest_check = each.value.skip_source_dest_check != null ? each.value.skip_source_dest_check : null baseline_ocpu_utilization = each.value.baseline_ocpu_utilization # preemptible_instance_config = each.value.preemptible_instance_config - all_plugins_disabled = each.value.all_plugins_disabled - is_management_disabled = each.value.is_management_disabled - is_monitoring_disabled = each.value.is_monitoring_disabled - recovery_action = each.value.recovery_action + all_plugins_disabled = each.value.all_plugins_disabled + is_management_disabled = each.value.is_management_disabled + is_monitoring_disabled = each.value.is_monitoring_disabled + recovery_action = each.value.recovery_action are_legacy_imds_endpoints_disabled = each.value.are_legacy_imds_endpoints_disabled - ipxe_script = each.value.ipxe_script - preserve_boot_volume = each.value.preserve_boot_volume - assign_private_dns_record = each.value.assign_private_dns_record - vlan_id = each.value.vlan_id - kms_key_id = each.value.kms_key_id + ipxe_script = each.value.ipxe_script + preserve_boot_volume = each.value.preserve_boot_volume + assign_private_dns_record = each.value.assign_private_dns_record + vlan_id = each.value.vlan_id + kms_key_id = each.value.kms_key_id # VNIC Details - vnic_defined_tags = each.value.vnic_defined_tags + vnic_defined_tags = each.value.vnic_defined_tags vnic_freeform_tags = each.value.vnic_freeform_tags - vnic_display_name = each.value.vnic_display_name + vnic_display_name = each.value.vnic_display_name } diff --git a/cd3_automation_toolkit/user-scripts/terraform/kms.tf b/cd3_automation_toolkit/user-scripts/terraform/kms.tf index e34b43377..55b55897e 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/kms.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/kms.tf @@ -10,7 +10,7 @@ module "vaults" { for_each = var.vaults != null ? var.vaults : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? 
(length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null #Optional defined_tags = each.value.defined_tags @@ -24,7 +24,7 @@ module "keys" { for_each = var.keys != null ? var.keys : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null #Optional defined_tags = each.value.defined_tags diff --git a/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf b/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf index 76cae0391..bfb50901d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf @@ -9,7 +9,7 @@ data "oci_certificates_management_certificates" "certificates_backendsets" { for_each = var.backend_sets != null ? var.backend_sets : {} #Optional - compartment_id = each.value.instance_compartment != null ? (length(regexall("ocid1.compartment.oc1*", each.value.instance_compartment)) > 0 ? each.value.instance_compartment : var.compartment_ocids[each.value.instance_compartment]) : var.tenancy_ocid + compartment_id = each.value.instance_compartment != null ? (length(regexall("ocid1.compartment.oc*", each.value.instance_compartment)) > 0 ? each.value.instance_compartment : var.compartment_ocids[each.value.instance_compartment]) : var.tenancy_ocid name = each.value.certificate_name state = "AVAILABLE" } @@ -20,7 +20,7 @@ data "oci_core_instances" "instances" { for_each = var.backends != null ? var.backends : {} state = "RUNNING" #Required - compartment_id = each.value.instance_compartment != null && each.value.instance_compartment != "" ? (length(regexall("ocid1.compartment.oc1*", each.value.instance_compartment)) > 0 ? each.value.instance_compartment : var.compartment_ocids[each.value.instance_compartment]) : var.tenancy_ocid + compartment_id = each.value.instance_compartment != null && each.value.instance_compartment != "" ? (length(regexall("ocid1.compartment.oc*", each.value.instance_compartment)) > 0 ? each.value.instance_compartment : var.compartment_ocids[each.value.instance_compartment]) : var.tenancy_ocid } data "oci_core_instance" "instance_ip" { @@ -36,19 +36,19 @@ locals { } module "load-balancers" { - # depends_on = [module.vcns, module.subnets] # Uncomment to execute Networking and Load Balancer together + # depends_on = [module.vcns, module.subnets,module.nsgs] # Uncomment to execute Networking and Load Balancer together source = "./modules/loadbalancer/lb-load-balancer" for_each = var.load_balancers != null ? var.load_balancers : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null vcn_names = [each.value.vcn_name] display_name = each.value.display_name shape = each.value.shape != null ? 
each.value.shape : "100Mbps" # Default value as per OCI - #subnet_ids = flatten(tolist([for subnet in each.value.subnet_names : (length(regexall("ocid1.subnet.oc1*", subnet)) > 0 ? [subnet] : data.oci_core_subnets.oci_subnets_lbs[subnet].subnets[*].id)])) + #subnet_ids = flatten(tolist([for subnet in each.value.subnet_names : (length(regexall("ocid1.subnet.oc*", subnet)) > 0 ? [subnet] : data.oci_core_subnets.oci_subnets_lbs[subnet].subnets[*].id)])) subnet_ids = each.value.subnet_ids - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null #Optional defined_tags = each.value.defined_tags @@ -58,7 +58,7 @@ module "load-balancers" { network_security_group_ids = each.value.nsg_ids key_name = each.key load_balancers = var.load_balancers - reserved_ips_id = each.value.reserved_ips_id != null ? (lower(each.value.reserved_ips_id) != "n" ? (length(regexall("ocid1.publicip.oc1*", each.value.reserved_ips_id)) > 0 ? [each.value.reserved_ips_id] : [merge(module.lbr-reserved-ips.*...)[join("-", [each.key, "reserved", "ip"])].reserved_ip_tf_id]) : []) : [] + reserved_ips_id = each.value.reserved_ips_id != null ? (lower(each.value.reserved_ips_id) != "n" ? (length(regexall("ocid1.publicip.oc*", each.value.reserved_ips_id)) > 0 ? [each.value.reserved_ips_id] : [merge(module.lbr-reserved-ips.*...)[join("-", [each.key, "reserved", "ip"])].reserved_ip_tf_id]) : []) : [] } /* @@ -73,7 +73,7 @@ module "hostnames" { #Required hostname = each.value.hostname - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] name = each.value.name } @@ -89,7 +89,7 @@ module "certificates" { #Required certificate_name = each.value.certificate_name - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] #Optional ca_certificate = each.value.ca_certificate != null ? file(each.value.ca_certificate) : null @@ -111,7 +111,7 @@ module "cipher-suites" { #Required ciphers = each.value.ciphers name = each.value.name - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? 
each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] } @@ -130,6 +130,7 @@ module "backend-sets" { #Optional interval_ms = each.value.interval_ms + is_force_plain_text = each.value.is_force_plain_text port = each.value.port response_body_regex = each.value.response_body_regex retries = each.value.retries @@ -137,7 +138,7 @@ module "backend-sets" { timeout_in_millis = each.value.timeout_in_millis url_path = each.value.url_path - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] name = each.value.name policy = each.value.policy backend_sets = var.backend_sets @@ -161,7 +162,7 @@ module "backends" { #Required backendset_name = merge(module.backend-sets.*...)[each.value.backendset_name].backend_set_tf_name ip_address = each.value.ip_address != "" ? (length(regexall("IP:", each.value.ip_address)) > 0 ? split("IP:", each.value.ip_address)[1] : data.oci_core_instance.instance_ip[each.key].private_ip) : null - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] port = each.value.port #Optional @@ -183,7 +184,7 @@ module "listeners" { #Required default_backend_set_name = merge(module.backend-sets.*...)[each.value.default_backend_set_name].backend_set_tf_name - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] name = each.value.name port = each.value.port protocol = each.value.protocol @@ -211,7 +212,7 @@ module "path-route-sets" { for_each = var.path_route_sets != null ? var.path_route_sets : {} #Required - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] name = each.value.name #Optional @@ -230,7 +231,7 @@ module "rule-sets" { for_each = var.rule_sets != null ? var.rule_sets : {} #Required - load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.load_balancer_id)) > 0 ? each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] + load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.load_balancer_id)) > 0 ? 
each.value.load_balancer_id : merge(module.load-balancers.*...)[each.value.load_balancer_id]["load_balancer_tf_id"] name = each.value.name #Optional @@ -255,7 +256,7 @@ module "loadbalancer-log-groups" { # Log Groups #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name @@ -278,9 +279,9 @@ module "loadbalancer-logs" { # Logs #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name - log_group_id = length(regexall("ocid1.loggroup.oc1*", each.value.log_group_id)) > 0 ? each.value.log_group_id : merge(module.loadbalancer-log-groups.*...)[each.value.log_group_id]["log_group_tf_id"] + log_group_id = length(regexall("ocid1.loggroup.oc*", each.value.log_group_id)) > 0 ? each.value.log_group_id : merge(module.loadbalancer-log-groups.*...)[each.value.log_group_id]["log_group_tf_id"] log_type = each.value.log_type #Required @@ -315,7 +316,7 @@ module "lbr-reserved-ips" { for_each = var.lbr_reserved_ips != null && var.lbr_reserved_ips != {} ? var.lbr_reserved_ips : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null lifetime = each.value.lifetime #Optional @@ -323,8 +324,8 @@ module "lbr-reserved-ips" { display_name = each.value.display_name freeform_tags = each.value.freeform_tags private_ip_id = each.value.private_ip_id - #private_ip_id = each.value.private_ip_id != null ? (length(regexall("ocid1.privateip.oc1*", each.value.private_ip_id)) > 0 ? each.value.private_ip_id : (length(regexall("\\.", each.value.private_ip_id)) == 3 ? local.private_ip_id[0][each.value.private_ip_id] : merge(module.private-ips.*...)[each.value.private_ip_id].private_ip_tf_id)) : null - #public_ip_pool_id = each.value.public_ip_pool_id != null ? (length(regexall("ocid1.publicippool.oc1*", each.value.public_ip_pool_id)) > 0 ? each.value.public_ip_pool_id : merge(module.public-ip-pools.*...)[each.value.public_ip_pool_id].public_ip_pool_tf_id) : null + #private_ip_id = each.value.private_ip_id != null ? (length(regexall("ocid1.privateip.oc*", each.value.private_ip_id)) > 0 ? each.value.private_ip_id : (length(regexall("\\.", each.value.private_ip_id)) == 3 ? local.private_ip_id[0][each.value.private_ip_id] : merge(module.private-ips.*...)[each.value.private_ip_id].private_ip_tf_id)) : null + #public_ip_pool_id = each.value.public_ip_pool_id != null ? 
(length(regexall("ocid1.publicippool.oc*", each.value.public_ip_pool_id)) > 0 ? each.value.public_ip_pool_id : merge(module.public-ip-pools.*...)[each.value.public_ip_pool_id].public_ip_pool_tf_id) : null } /* diff --git a/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf b/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf index 906cba28c..a7d0f1d57 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf @@ -12,10 +12,10 @@ module "alarms" { for_each = var.alarms != null ? var.alarms : {} alarm_name = each.value.alarm_name - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - destinations = [for tn in each.value.destinations : (length(regexall("ocid1.onstopic.oc1*", tn)) > 0 ? tn : merge(module.notifications-topics.*...)[tn]["topic_tf_id"])] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + destinations = [for tn in each.value.destinations : (length(regexall("ocid1.onstopic.oc*", tn)) > 0 ? tn : merge(module.notifications-topics.*...)[tn]["topic_tf_id"])] is_enabled = each.value.is_enabled - metric_compartment_id = each.value.metric_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.metric_compartment_id)) > 0 ? each.value.metric_compartment_id : var.compartment_ocids[each.value.metric_compartment_id]) : null + metric_compartment_id = each.value.metric_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.metric_compartment_id)) > 0 ? each.value.metric_compartment_id : var.compartment_ocids[each.value.metric_compartment_id]) : null namespace = each.value.namespace query = each.value.query severity = each.value.severity @@ -46,7 +46,7 @@ module "events" { for_each = var.events != null ? var.events : {} event_name = each.value.event_name - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null is_enabled = each.value.is_enabled description = each.value.description condition = each.value.condition @@ -74,7 +74,7 @@ module "notifications-topics" { source = "./modules/managementservices/notification-topic" for_each = var.notifications_topics != null ? var.notifications_topics : {} - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null description = each.value.description topic_name = each.value.topic_name @@ -88,10 +88,10 @@ module "notifications-subscriptions" { for_each = var.notifications_subscriptions != null ? 
var.notifications_subscriptions : {} depends_on = [module.notifications-topics] - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null endpoint = each.value.endpoint protocol = each.value.protocol - topic_id = length(regexall("ocid1.onstopic.oc1*", each.value.topic_id)) > 0 ? each.value.topic_id : merge(module.notifications-topics.*...)[each.value.topic_id]["topic_tf_id"] + topic_id = length(regexall("ocid1.onstopic.oc*", each.value.topic_id)) > 0 ? each.value.topic_id : merge(module.notifications-topics.*...)[each.value.topic_id]["topic_tf_id"] #Optional defined_tags = each.value.defined_tags freeform_tags = each.value.freeform_tags @@ -113,7 +113,7 @@ module "service-connectors" { for_each = var.service_connectors - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null logs_compartment_id = var.tenancy_ocid source_monitoring_details = each.value.source_details.source_kind == "monitoring" ? { for k, v in each.value.source_details.source_monitoring_details : lookup(var.compartment_ocids, k, "not_found") => v } : {} target_monitoring_details = each.value.target_details.target_kind == "monitoring" ? { for k, v in each.value.target_details.target_monitoring_details : lookup(var.compartment_ocids, k, "not_found") => v } : {} diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/data.tf index 9c500ff42..1c6f63a29 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/data.tf @@ -6,7 +6,7 @@ ############################# locals { - nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null ADs = [ for ad in data.oci_identity_availability_domains.ads.availability_domains : ad.name @@ -105,7 +105,7 @@ data "oci_core_volume_backup_policies" "boot_vol_custom_policy" { ################################ data "oci_marketplace_listing_package_agreements" "listing_package_agreements" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 
0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 #Required listing_id = data.oci_marketplace_listing.listing.0.id package_version = data.oci_marketplace_listing.listing.0.default_package_version @@ -115,7 +115,7 @@ data "oci_marketplace_listing_package_agreements" "listing_package_agreements" { } data "oci_marketplace_listing_package" "listing_package" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 #Required listing_id = data.oci_marketplace_listing.listing.0.id package_version = data.oci_marketplace_listing.listing.0.default_package_version @@ -125,7 +125,7 @@ data "oci_marketplace_listing_package" "listing_package" { } data "oci_marketplace_listing_packages" "listing_packages" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 #Required listing_id = data.oci_marketplace_listing.listing.0.id @@ -134,25 +134,25 @@ data "oci_marketplace_listing_packages" "listing_packages" { } data "oci_marketplace_listings" "listings" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 name = [var.source_image_id] #is_featured = true # Comment this line for GovCloud compartment_id = var.compartment_id } data "oci_marketplace_listing" "listing" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 listing_id = data.oci_marketplace_listings.listings.0.listings[0].id compartment_id = var.compartment_id } data "oci_core_app_catalog_listing_resource_versions" "app_catalog_listing_resource_versions" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 listing_id = data.oci_marketplace_listing_package.listing_package.0.app_catalog_listing_id } data "oci_core_app_catalog_listing_resource_version" "catalog_listing" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 
0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 listing_id = data.oci_marketplace_listing_package.listing_package.0.app_catalog_listing_id resource_version = data.oci_marketplace_listing_package.listing_package.0.app_catalog_listing_resource_version } diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf index 2b908637d..683be11a2 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf @@ -116,7 +116,7 @@ resource "oci_core_instance" "instance" { } source_details { - source_id = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? var.source_image_id : data.oci_core_app_catalog_listing_resource_version.catalog_listing.0.listing_resource_id + source_id = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? var.source_image_id : data.oci_core_app_catalog_listing_resource_version.catalog_listing.0.listing_resource_id source_type = var.source_type #Optional #boot_volume_size_in_gbs = var.boot_volume_size_in_gbs @@ -229,7 +229,7 @@ resource "oci_core_volume_backup_policy_assignment" "volume_backup_policy_assign ################################ resource "oci_marketplace_accepted_agreement" "accepted_agreement" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 #Required agreement_id = oci_marketplace_listing_package_agreement.listing_package_agreement.0.agreement_id compartment_id = var.compartment_id @@ -239,7 +239,7 @@ resource "oci_marketplace_accepted_agreement" "accepted_agreement" { } resource "oci_marketplace_listing_package_agreement" "listing_package_agreement" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 #Required agreement_id = data.oci_marketplace_listing_package_agreements.listing_package_agreements.0.agreements[0].id listing_id = data.oci_marketplace_listing.listing.0.id @@ -248,7 +248,7 @@ resource "oci_marketplace_listing_package_agreement" "listing_package_agreement" #------ Get Image Agreement resource "oci_core_app_catalog_listing_resource_version_agreement" "mp_image_agreement" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 
0 : 1 listing_id = data.oci_marketplace_listing_package.listing_package.0.app_catalog_listing_id #listing_resource_version = data.oci_marketplace_listing_package.listing_package.0.app_catalog_listing_resource_version listing_resource_version = data.oci_core_app_catalog_listing_resource_versions.app_catalog_listing_resource_versions.0.app_catalog_listing_resource_versions[0].listing_resource_version @@ -258,7 +258,7 @@ resource "oci_core_app_catalog_listing_resource_version_agreement" "mp_image_agr # ------ Accept Terms and Subscribe to the image, placing the image in a particular compartment resource "oci_core_app_catalog_subscription" "mp_image_subscription" { - count = length(regexall("ocid1.image.oc1*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc1*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 + count = length(regexall("ocid1.image.oc*", var.source_image_id)) > 0 || length(regexall("ocid1.bootvolume.oc*", var.source_image_id)) > 0 || var.source_image_id == null ? 0 : 1 compartment_id = var.compartment_id eula_link = oci_core_app_catalog_listing_resource_version_agreement.mp_image_agreement[0].eula_link listing_id = oci_core_app_catalog_listing_resource_version_agreement.mp_image_agreement[0].listing_id diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/adb/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/adb/data.tf index bed07b08a..4d1cf1f93 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/database/adb/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/adb/data.tf @@ -6,7 +6,7 @@ ############################# locals { - nsg_ids = flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_adb[nsg].network_security_groups[*].id)])) + nsg_ids = flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_adb[nsg].network_security_groups[*].id)])) } data "oci_core_vcns" "oci_vcns_adb" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/dbsystem-vm-bm/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/dbsystem-vm-bm/data.tf index b4091bc19..ad18dc09d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/database/dbsystem-vm-bm/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/dbsystem-vm-bm/data.tf @@ -6,7 +6,7 @@ ############################# locals { - nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_dbsystems[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? 
[nsg] : data.oci_core_network_security_groups.network_security_groups_dbsystems[nsg].network_security_groups[*].id)])) : null } data "oci_core_vcns" "oci_vcns_dbsystems" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/exa-vmcluster/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/exa-vmcluster/data.tf index ea24831dc..9a8582bdf 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/database/exa-vmcluster/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/exa-vmcluster/data.tf @@ -6,8 +6,8 @@ ############################# locals { - nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_db_exacs[nsg].network_security_groups[*].id)])) : null - backup_nsg_ids = var.backup_network_nsg_ids != null ? flatten(tolist([for nsg in var.backup_network_nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_backup_db_exacs[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_db_exacs[nsg].network_security_groups[*].id)])) : null + backup_nsg_ids = var.backup_network_nsg_ids != null ? flatten(tolist([for nsg in var.backup_network_nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_backup_db_exacs[nsg].network_security_groups[*].id)])) : null db_servers = flatten(toset([for server in data.oci_database_db_servers.all_db_servers : server.db_servers[*].id ])) } diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/identity/iam-user/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/identity/iam-user/main.tf index 9e3f146bc..2ab4abf88 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/identity/iam-user/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/identity/iam-user/main.tf @@ -23,7 +23,7 @@ resource "oci_identity_user_group_membership" "user_group_membership" { count = var.group_membership != null ? length(var.group_membership) : 0 depends_on = [oci_identity_user.user] user_id = oci_identity_user.user.id - group_id = length(regexall("ocid1.group.oc1*", var.group_membership[count.index])) > 0 ? var.group_membership[count.index] : data.oci_identity_groups.iam_groups.groups[index(data.oci_identity_groups.iam_groups.groups.*.name, var.group_membership[count.index])].id + group_id = length(regexall("ocid1.group.oc*", var.group_membership[count.index])) > 0 ? 
var.group_membership[count.index] : data.oci_identity_groups.iam_groups.groups[index(data.oci_identity_groups.iam_groups.groups.*.name, var.group_membership[count.index])].id } resource "oci_identity_user_capabilities_management" "user_capabilities_management" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/main.tf index ef7af776e..8e1d91c9b 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/main.tf @@ -13,6 +13,7 @@ resource "oci_load_balancer_backend_set" "backend_set" { #Optional interval_ms = var.interval_ms + is_force_plain_text = var.is_force_plain_text port = var.port response_body_regex = var.response_body_regex retries = var.retries diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/variables.tf index 34fad60c9..b4e3b1135 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/variables.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-backend-set/variables.tf @@ -17,6 +17,11 @@ variable "interval_ms" { default = 10000 # Default as per hashicorp terraform } +variable "is_force_plain_text" { + type = string + description = "Specifies if health checks should always be done using plain text instead of depending on whether or not the associated backend set is using SSL." +} + variable "port" { type = number description = "The backend server port against which to run the health check." diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/data.tf index 46a2001e9..943ba1534 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/data.tf @@ -6,7 +6,7 @@ ############################# locals { - nsg_ids = var.network_security_group_ids != null ? flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.network_security_group_ids != null ? flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? 
[nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null } data "oci_core_network_security_groups" "network_security_groups" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/main.tf index 7f442e00d..5e8e09f23 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/loadbalancer/lb-load-balancer/main.tf @@ -11,7 +11,7 @@ resource "oci_load_balancer_load_balancer" "load_balancer" { display_name = var.display_name shape = var.shape #subnet_ids = var.subnet_ids - subnet_ids = flatten(tolist([for subnet in var.subnet_ids : (length(regexall("ocid1.subnet.oc1*", subnet)) > 0 ? [subnet] : data.oci_core_subnets.oci_subnets_lbs[subnet].subnets[*].id)])) + subnet_ids = flatten(tolist([for subnet in var.subnet_ids : (length(regexall("ocid1.subnet.oc*", subnet)) > 0 ? [subnet] : data.oci_core_subnets.oci_subnets_lbs[subnet].subnets[*].id)])) #Optional diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/alarm/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/alarm/main.tf index ee7c6742b..a63151473 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/alarm/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/alarm/main.tf @@ -17,6 +17,7 @@ resource "oci_monitoring_alarm" "alarm" { query = var.query severity = var.severity body = var.body + message_format = var.message_format #metric_compartment_id_in_subtree = var.alarm_metric_compartment_id_in_subtree pending_duration = var.trigger_delay_minutes diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/event/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/event/main.tf index b042490f9..819a42661 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/event/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/event/main.tf @@ -26,7 +26,7 @@ resource "oci_events_rule" "event" { description = actions.value.description != "" ? actions.value.description : null function_id = actions.value.function_id stream_id = actions.value.stream_id - topic_id = (actions.value.topic_id != "" && actions.value.topic_id != null) ? (length(regexall("ocid1.onstopic.oc1*", actions.value.topic_id)) > 0 ? actions.value.topic_id : var.topic_name[actions.value.topic_id]["topic_tf_id"]) : null + topic_id = (actions.value.topic_id != "" && actions.value.topic_id != null) ? (length(regexall("ocid1.onstopic.oc*", actions.value.topic_id)) > 0 ? 
actions.value.topic_id : var.topic_name[actions.value.topic_id]["topic_tf_id"]) : null } } } diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/network/default-route-table/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/network/default-route-table/main.tf index c12f672f4..e5de53947 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/network/default-route-table/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/network/default-route-table/main.tf @@ -25,7 +25,7 @@ resource "oci_core_default_route_table" "default_route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.privateip.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : "" + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.privateip.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : "" #Optional description = route_rules.value["description"] @@ -40,7 +40,7 @@ resource "oci_core_default_route_table" "default_route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.localpeeringgateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : try(var.hub_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.spoke_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.exported_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"]) + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.localpeeringgateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : try(var.hub_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.spoke_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.exported_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"]) #Optional description = route_rules.value["description"] @@ -55,7 +55,7 @@ resource "oci_core_default_route_table" "default_route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.internetgateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.igw_id[route_rules.value["network_entity_id"]]["igw_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.internetgateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.igw_id[route_rules.value["network_entity_id"]]["igw_tf_id"] #Optional description = route_rules.value["description"] @@ -70,10 +70,10 @@ resource "oci_core_default_route_table" "default_route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0) ? 
route_rules.value["network_entity_id"] : var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"] - #length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null + #length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null #Optional description = route_rules.value["description"] @@ -88,10 +88,10 @@ resource "oci_core_default_route_table" "default_route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.natgateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.ngw_id[route_rules.value["network_entity_id"]]["ngw_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.natgateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.ngw_id[route_rules.value["network_entity_id"]]["ngw_tf_id"] - #length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null + #length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null #Optional description = route_rules.value["description"] @@ -106,10 +106,10 @@ resource "oci_core_default_route_table" "default_route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.servicegateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.sgw_id[route_rules.value["network_entity_id"]]["sgw_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.servicegateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.sgw_id[route_rules.value["network_entity_id"]]["sgw_tf_id"] - #length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null + #length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null #Optional description = route_rules.value["description"] diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/network/drg-route-distribution-statement/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/network/drg-route-distribution-statement/main.tf index 4e0401e6c..6b2d20198 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/network/drg-route-distribution-statement/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/network/drg-route-distribution-statement/main.tf @@ -21,7 +21,7 @@ resource "oci_core_drg_route_distribution_statement" "drg_route_distribution_sta #Optional attachment_type = match_criteria.value.attachment_type - drg_attachment_id = match_criteria.value.drg_attachment_id != "" && match_criteria.value.drg_attachment_id != null ? (length(regexall("ocid1.drgattachment.oc1*", match_criteria.value.drg_attachment_id)) > 0 ? match_criteria.value.drg_attachment_id : var.drg_attachment_ids[match_criteria.value.drg_attachment_id]["drg_attachment_tf_id"]) : "" + drg_attachment_id = match_criteria.value.drg_attachment_id != "" && match_criteria.value.drg_attachment_id != null ? (length(regexall("ocid1.drgattachment.oc*", match_criteria.value.drg_attachment_id)) > 0 ? 
match_criteria.value.drg_attachment_id : var.drg_attachment_ids[match_criteria.value.drg_attachment_id]["drg_attachment_tf_id"]) : "" } } } \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/network/route-table/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/network/route-table/main.tf index 7929aad31..9f089ad47 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/network/route-table/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/network/route-table/main.tf @@ -26,7 +26,7 @@ resource "oci_core_route_table" "route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.privateip.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : "" + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.privateip.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : "" #Optional description = route_rules.value["description"] != "" ? route_rules.value["description"] : null @@ -41,7 +41,7 @@ resource "oci_core_route_table" "route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.localpeeringgateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : try(var.hub_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.spoke_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.peer_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.none_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.exported_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"]) + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.localpeeringgateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : try(var.hub_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.spoke_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.peer_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.none_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.exported_lpg_id[route_rules.value["network_entity_id"]]["lpg_tf_id"], var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"]) #Optional description = route_rules.value["description"] != "" ? route_rules.value["description"] : null @@ -56,7 +56,7 @@ resource "oci_core_route_table" "route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.internetgateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.igw_id[route_rules.value["network_entity_id"]]["igw_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.internetgateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.igw_id[route_rules.value["network_entity_id"]]["igw_tf_id"] #Optional description = route_rules.value["description"] != "" ? 
route_rules.value["description"] : null @@ -71,10 +71,10 @@ resource "oci_core_route_table" "route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.drg_id[route_rules.value["network_entity_id"]]["drg_tf_id"] - #length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null + #length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null #Optional description = route_rules.value["description"] != "" ? route_rules.value["description"] : null @@ -89,10 +89,10 @@ resource "oci_core_route_table" "route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.natgateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.ngw_id[route_rules.value["network_entity_id"]]["ngw_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.natgateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.ngw_id[route_rules.value["network_entity_id"]]["ngw_tf_id"] - #length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null + #length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null #Optional description = route_rules.value["description"] != "" ? route_rules.value["description"] : null @@ -107,10 +107,10 @@ resource "oci_core_route_table" "route_table" { content { #Required - network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.servicegateway.oc1*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.sgw_id[route_rules.value["network_entity_id"]]["sgw_tf_id"] + network_entity_id = (route_rules.value["network_entity_id"] != null && length(regexall("ocid1.servicegateway.oc*", route_rules.value["network_entity_id"])) > 0) ? route_rules.value["network_entity_id"] : var.sgw_id[route_rules.value["network_entity_id"]]["sgw_tf_id"] - #length(regexall("ocid1.drg.oc1*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null + #length(regexall("ocid1.drg.oc*", route_rules.value["network_entity_id"])) > 0 ? route_rules.value["network_entity_id"] : null #Optional description = route_rules.value["description"] != "" ? route_rules.value["description"] : null diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/network/vlan/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/network/vlan/data.tf index 912b4869a..74b845e1f 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/network/vlan/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/network/vlan/data.tf @@ -6,7 +6,7 @@ ############################# locals { - nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? 
[nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null } data "oci_core_network_security_groups" "network_security_groups" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/main.tf index 20a50861c..c8dc6c1a8 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/main.tf @@ -14,6 +14,7 @@ resource "oci_network_load_balancer_backend" "backend" { #Optional ip_address = var.ip_address != "" ? (length(regexall("IP:", var.ip_address)) > 0 ? split("IP:", var.ip_address)[1] : data.oci_core_instance.nlb_instance_ip[0].private_ip) : null is_drain = var.is_drain + is_backup = var.is_backup is_offline = var.is_offline name = length(regexall("IP:", var.ip_address)) > 0 ? join(":", [split("IP:", var.ip_address)[1], var.port]) : join(":", [merge(local.nlb_private_ip_ocid.private_ocids.*...)[merge(local.nlb_instance_vnic_ocid.vnic_ocids.*...)[merge(local.nlb_instance_ocid.ocid.*...)[split("NAME:", var.ip_address)[1]][0]][0]][0], var.port]) target_id = length(regexall("IP:*", var.ip_address)) == 0 ? merge(local.nlb_private_ip_ocid.private_ocids.*...)[merge(local.nlb_instance_vnic_ocid.vnic_ocids.*...)[merge(local.nlb_instance_ocid.ocid.*...)[split("NAME:", var.ip_address)[1]][0]][0]][0] : null diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/variables.tf index 3b5fd81f8..e56315b5b 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/variables.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backend/variables.tf @@ -43,6 +43,12 @@ variable "is_drain" { default = false # Default value as per hashicorp terraform } +variable "is_backup" { + type = bool + description = "Whether the load balancer should treat this server as a backup unit." + default = false # Default value as per hashicorp terraform +} + variable "is_offline" { type = bool description = "Whether the load balancer should treat this server as offline. Offline servers receive no incoming traffic." 
diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/main.tf index 5b1be7eb3..1ed3bcc7d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/main.tf @@ -15,7 +15,9 @@ resource "oci_network_load_balancer_backend_set" "backend_set" { #Optional interval_in_millis = var.interval_in_millis port = var.port + request_data = var.request_data response_body_regex = var.response_body_regex + response_data = var.response_data retries = var.retries return_code = var.return_code timeout_in_millis = var.timeout_in_millis diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/variables.tf index be8179a34..7bf29d5df 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/variables.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb-backendset/variables.tf @@ -23,12 +23,24 @@ variable "port" { default = null } +variable "request_data" { + type = string + description = "Base64 encoded pattern to be sent as UDP or TCP health check probe." + default = null +} + variable "response_body_regex" { type = string description = "A regular expression for parsing the response body from the backend server" default = null } +variable "response_data" { + type = string + description = "Base64 encoded pattern to be validated as UDP or TCP health check probe response." + default = null +} + variable "retries" { type = number description = " The number of retries to attempt before a backend server is considered unhealthy" diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/data.tf index a7eebc7ac..70810e523 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/data.tf @@ -6,7 +6,7 @@ ####################################### locals { - nsg_ids = var.network_security_group_ids != null ? flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.network_security_group_ids != null ? flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ?
[nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null } data "oci_core_network_security_groups" "network_security_groups" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/main.tf index 9646a8bca..ed808f378 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/main.tf @@ -12,6 +12,7 @@ resource "oci_network_load_balancer_network_load_balancer" "network_load_balance display_name = var.display_name subnet_id = var.subnet_id is_preserve_source_destination = var.is_preserve_source_destination + is_symmetric_hash_enabled = var.is_symmetric_hash_enabled is_private = var.is_private network_security_group_ids = var.network_security_group_ids != null ? (local.nsg_ids == [] ? ["INVALID NSG Name"] : local.nsg_ids) : null nlb_ip_version = var.nlb_ip_version diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/variables.tf index 175608ad9..217076b3d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/variables.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/networkloadbalancer/nlb/variables.tf @@ -37,6 +37,11 @@ variable "is_private" { default = true } +variable "is_symmetric_hash_enabled" { + type = bool + +} + variable "network_security_group_ids" { type = list(any) description = "NSGs to place the load balancer in" diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/data.tf index a68298abd..781f3c69e 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/data.tf @@ -6,7 +6,7 @@ ############################# locals { - endpoint_nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null + endpoint_nsg_ids = var.nsg_ids != null ? flatten(tolist([for nsg in var.nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id)])) : null } data "oci_core_network_security_groups" "network_security_groups" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/main.tf index a84804028..610fb4e8e 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/oke/cluster/main.tf @@ -36,7 +36,7 @@ resource "oci_containerengine_cluster" "cluster" { pods_cidr = var.pods_cidr services_cidr = var.services_cidr } - service_lb_subnet_ids = flatten(tolist([for subnet in var.service_lb_subnet_ids : (length(regexall("ocid1.subnet.oc1*", subnet)) > 0 ? 
[subnet] : data.oci_core_subnets.oci_subnets_cluster_lbs[subnet].subnets[*].id)])) + service_lb_subnet_ids = flatten(tolist([for subnet in var.service_lb_subnet_ids : (length(regexall("ocid1.subnet.oc*", subnet)) > 0 ? [subnet] : data.oci_core_subnets.oci_subnets_cluster_lbs[subnet].subnets[*].id)])) } lifecycle { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/oke/nodepool/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/oke/nodepool/data.tf index 4f532b41e..738bddd9e 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/oke/nodepool/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/oke/nodepool/data.tf @@ -6,8 +6,8 @@ ############################# locals { - nodepool_nsg_ids = var.worker_nsg_ids != null ? flatten(tolist([for nsg in var.worker_nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_workers[nsg].network_security_groups[*].id)])) : null - pod_nsg_ids = var.pod_nsg_ids != null ? flatten(tolist([for nsg in var.pod_nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_pods[nsg].network_security_groups[*].id)])) : null + nodepool_nsg_ids = var.worker_nsg_ids != null ? flatten(tolist([for nsg in var.worker_nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_workers[nsg].network_security_groups[*].id)])) : null + pod_nsg_ids = var.pod_nsg_ids != null ? flatten(tolist([for nsg in var.pod_nsg_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_pods[nsg].network_security_groups[*].id)])) : null } data "oci_identity_availability_domains" "ads" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/address-list/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/address-list/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/address-list/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/address-list/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application-group/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application-group/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application-group/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application-group/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? 
(length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/application/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-profile/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-profile/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-profile/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-profile/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-rules/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-rules/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-rules/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/decryption-rules/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? 
var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/firewall/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/firewall/data.tf index 442286b0a..68978ed4b 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/firewall/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/firewall/data.tf @@ -12,7 +12,7 @@ data "oci_core_network_security_groups" "network_security_groups" { locals { - nsg_id = var.nsg_id != null ? flatten(tolist([for nsg in var.nsg_id : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id) ])) : null + nsg_id = var.nsg_id != null ? flatten(tolist([for nsg in var.nsg_id : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups[nsg].network_security_groups[*].id) ])) : null } diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/security-rules/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/security-rules/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/security-rules/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/security-rules/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service-list/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service-list/data.tf index e9c38d5be..a68341946 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service-list/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service-list/data.tf @@ -1,6 +1,6 @@ /* locals { - services = var.services != null ? flatten(tolist([for sid in var.services : (length(regexall("ocid1.networkfirewallpolicy.oc1*", sid)) > 0 ? [sid] : data.oci_network_firewall_network_firewall_policy_services.fw-services[sid].*.name)])) : null + services = var.services != null ? flatten(tolist([for sid in var.services : (length(regexall("ocid1.networkfirewallpolicy.oc*", sid)) > 0 ? 
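Note on the recurring expression above: across these modules an input may carry either a literal OCID or a display-name key, and a loose regexall() check on the OCID prefix decides which branch resolves it. The pattern change from "oc1" to "oc" appears intended to let OCIDs from realms other than the commercial oc1 realm (oc2, oc3, and so on) pass the same check. A minimal, self-contained sketch of the pattern follows; the variable names (nsg_inputs, nsg_name_to_ocid) and example values are hypothetical, and the lookup map stands in for the data sources the toolkit modules use.

variable "nsg_inputs" {
  # Mix of NSG OCIDs and NSG display-name keys (hypothetical input).
  type    = list(string)
  default = ["ocid1.networksecuritygroup.oc2.phx.aaaaexample", "app-nsg"]
}

variable "nsg_name_to_ocid" {
  # Hypothetical lookup map; the real modules resolve names via oci_core_network_security_groups data sources.
  type    = map(string)
  default = { "app-nsg" = "ocid1.networksecuritygroup.oc1.phx.bbbbexample" }
}

locals {
  # Same shape as the locals in the firewall/OKE modules: keep OCIDs as-is,
  # resolve everything else by name, then flatten to a single list of IDs.
  resolved_nsg_ids = flatten([
    for nsg in var.nsg_inputs :
    length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : [var.nsg_name_to_ocid[nsg]]
  ])
}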
[sid] : data.oci_network_firewall_network_firewall_policy_services.fw-services[sid].*.name)])) : null } data "oci_network_firewall_network_firewall_policy_services" "fw-services" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/service/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/url-list/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/url-list/data.tf index 07b270bd3..12fa5c62d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/url-list/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/url-list/data.tf @@ -3,6 +3,6 @@ } data "oci_network_firewall_network_firewall_policies" "fw-policy" { - compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] display_name = var.network_firewall_policy_id */ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/storage/block-volume/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/storage/block-volume/main.tf index fa8b01c8a..1620c536f 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/storage/block-volume/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/storage/block-volume/main.tf @@ -48,7 +48,7 @@ resource "oci_core_volume_attachment" "block_vol_instance_attachment" { #################################### locals { - #existing_volume_id = length(data.oci_core_volumes.all_volumes[0].volumes) > 0 ? length(regexall("ocid1.volume.oc1*", data.oci_core_volumes.all_volumes[0].volumes[0].id)) > 0 ? data.oci_core_volumes.all_volumes[0].volumes[0].id : "" : "" + #existing_volume_id = length(data.oci_core_volumes.all_volumes[0].volumes) > 0 ? length(regexall("ocid1.volume.oc*", data.oci_core_volumes.all_volumes[0].volumes[0].id)) > 0 ? data.oci_core_volumes.all_volumes[0].volumes[0].id : "" : "" policy_tf_compartment_id = var.policy_tf_compartment_id != null ? var.policy_tf_compartment_id : null current_policy_id = var.block_tf_policy != null ? (lower(var.block_tf_policy) == "gold" || lower(var.block_tf_policy) == "silver" || lower(var.block_tf_policy) == "bronze" ? 
data.oci_core_volume_backup_policies.block_vol_backup_policy[0].volume_backup_policies.0.id : data.oci_core_volume_backup_policies.block_vol_custom_policy[0].volume_backup_policies.0.id) : "" } diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/storage/file-storage/mount-target/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/storage/file-storage/mount-target/data.tf index 691dc87a9..97d467587 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/storage/file-storage/mount-target/data.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/storage/file-storage/mount-target/data.tf @@ -6,7 +6,7 @@ ############################# locals { - nsg_ids = var.network_security_group_ids != null ? flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc1*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_mt[nsg].network_security_groups[*].id)])) : null + nsg_ids = var.network_security_group_ids != null ? flatten(tolist([for nsg in var.network_security_group_ids : (length(regexall("ocid1.networksecuritygroup.oc*", nsg)) > 0 ? [nsg] : data.oci_core_network_security_groups.network_security_groups_mt[nsg].network_security_groups[*].id)])) : null } data "oci_core_network_security_groups" "network_security_groups_mt" { diff --git a/cd3_automation_toolkit/user-scripts/terraform/network.tf b/cd3_automation_toolkit/user-scripts/terraform/network.tf index 0456317ca..aea32134d 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/network.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/network.tf @@ -9,7 +9,7 @@ data "oci_core_drg_route_tables" "drg_route_tables" { for_each = (var.data_drg_route_tables != null || var.data_drg_route_tables != {}) ? var.data_drg_route_tables : {} #Required - drg_id = length(regexall("ocid1.drg.oc1*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"] + drg_id = length(regexall("ocid1.drg.oc*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"] filter { name = "display_name" values = [each.value.values] @@ -22,7 +22,7 @@ data "oci_core_drg_route_distributions" "drg_route_distributions" { for_each = (var.data_drg_route_table_distributions != null || var.data_drg_route_table_distributions != {}) ? var.data_drg_route_table_distributions : {} #Required - drg_id = length(regexall("ocid1.drg.oc1*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"] + drg_id = length(regexall("ocid1.drg.oc*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"] filter { name = "display_name" values = [each.value.values] @@ -40,8 +40,8 @@ module "vcns" { for_each = var.vcns != null ? var.vcns : {} #Required - #compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : try(zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.compartment_id], var.compartment_ocids[each.value.compartment_id]) - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + #compartment_id = length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : try(zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.compartment_id], var.compartment_ocids[each.value.compartment_id]) + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null #Optional cidr_blocks = each.value.cidr_blocks @@ -74,15 +74,15 @@ module "igws" { depends_on = [module.vcns] #,module.route-tables] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional enabled = each.value.enable_igw # Defaults to true by terraform hashicorp defined_tags = each.value.defined_tags display_name = each.value.igw_name != null ? each.value.igw_name : null freeform_tags = each.value.freeform_tags - #route_table_id = each.value.route_table_id == null || each.value.route_table_id == "" ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] : (length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"]) + #route_table_id = each.value.route_table_id == null || each.value.route_table_id == "" ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] : (length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"]) } @@ -104,8 +104,8 @@ module "ngws" { depends_on = [module.vcns] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional block_traffic = each.value.block_traffic # Defaults to false by terraform hashicorp @@ -133,12 +133,12 @@ module "hub-lpgs" { depends_on = [module.vcns, module.spoke-lpgs, module.none-lpgs, module.exported-lpgs, module.peer-lpgs] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional peer_id = each.value.peer_id != "" ? (length(regexall("##peer_id*", each.value.peer_id)) > 0 ? null : try(merge(module.spoke-lpgs.*...)[each.value.peer_id]["lpg_tf_id"], merge(module.exported-lpgs.*...)[each.value.peer_id]["lpg_tf_id"], merge(module.peer-lpgs.*...)[each.value.peer_id]["lpg_tf_id"], merge(module.none-lpgs.*...)[each.value.peer_id]["lpg_tf_id"])) : null - #route_table_id = length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] + #route_table_id = length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] defined_tags = each.value.defined_tags display_name = each.value.lpg_name != null ? each.value.lpg_name : null freeform_tags = each.value.freeform_tags @@ -151,12 +151,12 @@ module "spoke-lpgs" { depends_on = [module.vcns] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional peer_id = (each.value.peer_id != "" && each.value.peer_id != null) ? (length(regexall("##peer_id*", each.value.peer_id)) > 0 ? null : each.value.peer_id) : null - #route_table_id = length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] + #route_table_id = length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] defined_tags = each.value.defined_tags display_name = each.value.lpg_name != null ? each.value.lpg_name : null freeform_tags = each.value.freeform_tags @@ -169,12 +169,12 @@ module "peer-lpgs" { depends_on = [module.vcns, module.none-lpgs] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? 
each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional peer_id = each.value.peer_id != "" ? (length(regexall("##peer_id*", each.value.peer_id)) > 0 ? null : try(merge(module.spoke-lpgs.*...)[each.value.peer_id]["lpg_tf_id"], merge(module.exported-lpgs.*...)[each.value.peer_id]["lpg_tf_id"], merge(module.none-lpgs.*...)[each.value.peer_id]["lpg_tf_id"])) : null - #route_table_id = length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] + #route_table_id = length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] defined_tags = each.value.defined_tags display_name = each.value.lpg_name != null ? each.value.lpg_name : null freeform_tags = each.value.freeform_tags @@ -187,12 +187,12 @@ module "none-lpgs" { depends_on = [module.vcns] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional peer_id = (each.value.peer_id != "" && each.value.peer_id != null) ? (length(regexall("##peer_id*", each.value.peer_id)) > 0 ? null : each.value.peer_id) : null - #route_table_id = length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] + #route_table_id = length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] defined_tags = each.value.defined_tags display_name = each.value.lpg_name != null ? each.value.lpg_name : null freeform_tags = each.value.freeform_tags @@ -205,12 +205,12 @@ module "exported-lpgs" { depends_on = [module.vcns] #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional peer_id = (each.value.peer_id != "" && each.value.peer_id != null) ? (length(regexall("##peer_id*", each.value.peer_id)) > 0 ? null : each.value.peer_id) : null - #route_table_id = length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] + #route_table_id = length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"] defined_tags = each.value.defined_tags display_name = each.value.lpg_name != null ? each.value.lpg_name : null freeform_tags = each.value.freeform_tags @@ -248,15 +248,15 @@ module "sgws" { for_each = (var.sgws != null || var.sgws != {}) ? var.sgws : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional defined_tags = each.value.defined_tags display_name = each.value.sgw_name != null ? each.value.sgw_name : null freeform_tags = each.value.freeform_tags service = each.value.service != "" ? (contains(split("-", each.value.service), "all") == true ? "all" : "objectstorage") : "all" - #route_table_id = length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : ((each.value.route_table_id != "" && each.value.route_table_id != null) ? (length(regexall(".Default-Route-Table-for*", each.value.route_table_id)) > 0 ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"]) : null) + #route_table_id = length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : ((each.value.route_table_id != "" && each.value.route_table_id != null) ? (length(regexall(".Default-Route-Table-for*", each.value.route_table_id)) > 0 ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"]) : null) } /* @@ -276,7 +276,7 @@ module "drgs" { for_each = (var.drgs != null || var.drgs != {}) ? var.drgs : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null #Optional defined_tags = each.value.defined_tags @@ -293,8 +293,8 @@ module "drg-attachments" { drg_display_name = each.value.display_name defined_tags = each.value.defined_tags freeform_tags = each.value.freeform_tags - drg_id = length(regexall("ocid1.drg.oc1*", each.value.drg_id)) > 0 ? each.value.drg_id : ((each.value.drg_id != "" && each.value.drg_id != null) ? merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"] : each.value.drg_id) - drg_route_table_id = length(regexall("ocid1.drgroutetable.oc1*", each.value.drg_route_table_id)) > 0 ? each.value.drg_route_table_id : ((each.value.drg_route_table_id != "" && each.value.drg_route_table_id != null) ? merge(module.drg-route-tables.*...)[each.value.drg_route_table_id]["drg_route_table_tf_id"] : null) + drg_id = length(regexall("ocid1.drg.oc*", each.value.drg_id)) > 0 ? each.value.drg_id : ((each.value.drg_id != "" && each.value.drg_id != null) ? merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"] : each.value.drg_id) + drg_route_table_id = length(regexall("ocid1.drgroutetable.oc*", each.value.drg_route_table_id)) > 0 ? each.value.drg_route_table_id : ((each.value.drg_route_table_id != "" && each.value.drg_route_table_id != null) ? merge(module.drg-route-tables.*...)[each.value.drg_route_table_id]["drg_route_table_tf_id"] : null) vcns_tf_id = merge(module.vcns.*...) route_table_tf_id = merge(module.route-tables.*...) default_route_table_tf_id = merge(module.default-route-tables.*...) @@ -324,7 +324,7 @@ module "default-dhcps" { for_each = (var.default_dhcps != null || var.default_dhcps != {}) ? var.default_dhcps : {} #Required - manage_default_resource_id = length(regexall("ocid1.dhcpoptions.oc1*", each.value.manage_default_resource_id)) > 0 ? each.value.manage_default_resource_id : merge(module.vcns.*...)[each.value.manage_default_resource_id]["vcn_default_dhcp_id"] + manage_default_resource_id = length(regexall("ocid1.dhcpoptions.oc*", each.value.manage_default_resource_id)) > 0 ? each.value.manage_default_resource_id : merge(module.vcns.*...)[each.value.manage_default_resource_id]["vcn_default_dhcp_id"] server_type = each.value.server_type custom_dns_servers = each.value.custom_dns_servers search_domain_names = each.value.search_domain != null ? each.value.search_domain.names : [] @@ -351,8 +351,8 @@ module "custom-dhcps" { for_each = (var.custom_dhcps != null || var.custom_dhcps != {}) ? var.custom_dhcps : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] server_type = each.value.server_type custom_dns_servers = each.value.custom_dns_servers != null ? each.value.custom_dns_servers : [] @@ -381,7 +381,7 @@ module "default-security-lists" { for_each = (var.default_seclists != null || var.default_seclists != {}) ? 
var.default_seclists : {} #Required - manage_default_resource_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_security_list_id"] + manage_default_resource_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_security_list_id"] key_name = each.key defined_tags = each.value.defined_tags @@ -406,9 +406,9 @@ module "security-lists" { for_each = (var.seclists != null || var.seclists != {}) ? var.seclists : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] key_name = each.key defined_tags = each.value.defined_tags @@ -433,7 +433,7 @@ module "default-route-tables" { for_each = (var.default_route_tables != null || var.default_route_tables != {}) ? var.default_route_tables : {} #Required - manage_default_resource_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] + manage_default_resource_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] #Optional defined_tags = each.value.defined_tags @@ -466,8 +466,8 @@ module "route-tables" { for_each = (var.route_tables != null || var.route_tables != {}) ? var.route_tables : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] #Optional defined_tags = each.value.defined_tags @@ -502,13 +502,13 @@ module "drg-route-tables" { for_each = (var.drg_route_tables != null || var.drg_route_tables != {}) ? var.drg_route_tables : {} #Required - drg_id = each.value.drg_id != null && each.value.drg_id != "" ? (length(regexall("ocid1.drg.oc1*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"]) : null + drg_id = each.value.drg_id != null && each.value.drg_id != "" ? (length(regexall("ocid1.drg.oc*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"]) : null #Optional defined_tags = each.value.defined_tags == {} ? 
null : each.value.defined_tags freeform_tags = each.value.freeform_tags == {} ? null : each.value.freeform_tags display_name = each.value.display_name != null ? each.value.display_name : null - import_drg_route_distribution_id = each.value.import_drg_route_distribution_id != null && each.value.import_drg_route_distribution_id != "" ? (length(regexall("ocid1.drgroutedistribution.oc1*", each.value.import_drg_route_distribution_id)) > 0 ? each.value.import_drg_route_distribution_id : (length(regexall(".Autogenerated-Import-Route-Distribution-for*", each.value.import_drg_route_distribution_id)) > 0 ? data.oci_core_drg_route_distributions.drg_route_distributions[each.value.import_drg_route_distribution_id].drg_route_distributions[0].id : merge(module.drg-route-distributions.*...)[each.value.import_drg_route_distribution_id]["drg_route_distribution_tf_id"])) : null + import_drg_route_distribution_id = each.value.import_drg_route_distribution_id != null && each.value.import_drg_route_distribution_id != "" ? (length(regexall("ocid1.drgroutedistribution.oc*", each.value.import_drg_route_distribution_id)) > 0 ? each.value.import_drg_route_distribution_id : (length(regexall(".Autogenerated-Import-Route-Distribution-for*", each.value.import_drg_route_distribution_id)) > 0 ? data.oci_core_drg_route_distributions.drg_route_distributions[each.value.import_drg_route_distribution_id].drg_route_distributions[0].id : merge(module.drg-route-distributions.*...)[each.value.import_drg_route_distribution_id]["drg_route_distribution_tf_id"])) : null is_ecmp_enabled = each.value.is_ecmp_enabled != null ? each.value.is_ecmp_enabled : null } @@ -530,10 +530,10 @@ module "drg-route-rules" { for_each = (var.drg_route_rules != null || var.drg_route_rules != {}) ? var.drg_route_rules : {} #Required - drg_route_table_id = length(regexall("ocid1.drgroutetable.oc1*", each.value.drg_route_table_id)) > 0 ? each.value.drg_route_table_id : ((each.value.drg_route_table_id != "" && each.value.drg_route_table_id != null) ? (length(regexall(".Autogenerated-Drg-Route-Table-for*", each.value.drg_route_table_id)) > 0 ? data.oci_core_drg_route_tables.drg_route_tables[each.value.drg_route_table_id].drg_route_tables[0].id : merge(module.drg-route-tables.*...)[each.value.drg_route_table_id]["drg_route_table_tf_id"]) : null) + drg_route_table_id = length(regexall("ocid1.drgroutetable.oc*", each.value.drg_route_table_id)) > 0 ? each.value.drg_route_table_id : ((each.value.drg_route_table_id != "" && each.value.drg_route_table_id != null) ? (length(regexall(".Autogenerated-Drg-Route-Table-for*", each.value.drg_route_table_id)) > 0 ? data.oci_core_drg_route_tables.drg_route_tables[each.value.drg_route_table_id].drg_route_tables[0].id : merge(module.drg-route-tables.*...)[each.value.drg_route_table_id]["drg_route_table_tf_id"]) : null) destination = each.value.destination destination_type = each.value.destination_type - next_hop_drg_attachment_id = length(regexall("ocid1.drgattachment.oc1*", each.value.next_hop_drg_attachment_id)) > 0 ? each.value.next_hop_drg_attachment_id : (each.value.next_hop_drg_attachment_id != "" && each.value.next_hop_drg_attachment_id != null ? merge(module.drg-attachments.*...)[each.value.next_hop_drg_attachment_id]["drg_attachment_tf_id"] : null) + next_hop_drg_attachment_id = length(regexall("ocid1.drgattachment.oc*", each.value.next_hop_drg_attachment_id)) > 0 ? each.value.next_hop_drg_attachment_id : (each.value.next_hop_drg_attachment_id != "" && each.value.next_hop_drg_attachment_id != null ? 
merge(module.drg-attachments.*...)[each.value.next_hop_drg_attachment_id]["drg_attachment_tf_id"] : null) } @@ -556,7 +556,7 @@ module "drg-route-distributions" { #Required distribution_type = each.value.distribution_type - drg_id = each.value.drg_id != null && each.value.drg_id != "" ? (length(regexall("ocid1.drg.oc1*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"]) : null + drg_id = each.value.drg_id != null && each.value.drg_id != "" ? (length(regexall("ocid1.drg.oc*", each.value.drg_id)) > 0 ? each.value.drg_id : merge(module.drgs.*...)[each.value.drg_id]["drg_tf_id"]) : null #Optional defined_tags = each.value.defined_tags @@ -581,7 +581,7 @@ module "drg-route-distribution-statements" { #Required key_name = each.key - drg_route_distribution_id = each.value.drg_route_distribution_id != null && each.value.drg_route_distribution_id != "" ? (length(regexall("ocid1.drgroutedistribution.oc1*", each.value.drg_route_distribution_id)) > 0 ? each.value.drg_route_distribution_id : (length(regexall(".Autogenerated-Import-Route-Distribution-for*", each.value.drg_route_distribution_id)) > 0 ? data.oci_core_drg_route_distributions.drg_route_distributions[each.value.drg_route_distribution_id].drg_route_distributions[0].id : merge(module.drg-route-distributions.*...)[each.value.drg_route_distribution_id]["drg_route_distribution_tf_id"])) : null + drg_route_distribution_id = each.value.drg_route_distribution_id != null && each.value.drg_route_distribution_id != "" ? (length(regexall("ocid1.drgroutedistribution.oc*", each.value.drg_route_distribution_id)) > 0 ? each.value.drg_route_distribution_id : (length(regexall(".Autogenerated-Import-Route-Distribution-for*", each.value.drg_route_distribution_id)) > 0 ? data.oci_core_drg_route_distributions.drg_route_distributions[each.value.drg_route_distribution_id].drg_route_distributions[0].id : merge(module.drg-route-distributions.*...)[each.value.drg_route_distribution_id]["drg_route_distribution_tf_id"])) : null priority = each.value.priority action = each.value.action drg_attachment_ids = merge(module.drg-attachments.*...) @@ -607,8 +607,8 @@ module "subnets" { #Required tenancy_ocid = var.tenancy_ocid - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - vcn_id = length(regexall("ocid1.vcn.oc1*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + vcn_id = length(regexall("ocid1.vcn.oc*", each.value.vcn_id)) > 0 ? each.value.vcn_id : merge(module.vcns.*...)[each.value.vcn_id]["vcn_tf_id"] cidr_block = each.value.cidr_block #Optional @@ -620,8 +620,8 @@ module "subnets" { prohibit_internet_ingress = each.value.prohibit_internet_ingress prohibit_public_ip_on_vnic = each.value.prohibit_public_ip_on_vnic availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" - dhcp_options_id = each.value.dhcp_options_id == null || each.value.dhcp_options_id == "" ? 
merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_dhcp_id"] : (length(regexall("ocid1.dhcpoptions.oc1*", each.value.dhcp_options_id)) > 0 ? each.value.dhcp_options_id : merge(module.custom-dhcps.*...)[each.value.dhcp_options_id]["custom_dhcp_tf_id"]) - route_table_id = each.value.route_table_id == null || each.value.route_table_id == "" ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] : (length(regexall("ocid1.routetable.oc1*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"]) + dhcp_options_id = each.value.dhcp_options_id == null || each.value.dhcp_options_id == "" ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_dhcp_id"] : (length(regexall("ocid1.dhcpoptions.oc*", each.value.dhcp_options_id)) > 0 ? each.value.dhcp_options_id : merge(module.custom-dhcps.*...)[each.value.dhcp_options_id]["custom_dhcp_tf_id"]) + route_table_id = each.value.route_table_id == null || each.value.route_table_id == "" ? merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_route_table_id"] : (length(regexall("ocid1.routetable.oc*", each.value.route_table_id)) > 0 ? each.value.route_table_id : merge(module.route-tables.*...)[each.value.route_table_id]["route_table_ids"]) security_list_ids = length(each.value.security_list_ids) == 0 ? [merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_security_list_id"]] : each.value.security_list_ids vcn_default_security_list_id = merge(module.vcns.*...)[each.value.vcn_id]["vcn_default_security_list_id"] custom_security_list_id = merge(module.security-lists.*...) @@ -644,7 +644,7 @@ module "vcn-log-groups" { # Log Groups #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name @@ -667,9 +667,9 @@ module "vcn-logs" { # Logs #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name - log_group_id = length(regexall("ocid1.loggroup.oc1*", each.value.log_group_id)) > 0 ? each.value.log_group_id : merge(module.vcn-log-groups.*...)[each.value.log_group_id]["log_group_tf_id"] + log_group_id = length(regexall("ocid1.loggroup.oc*", each.value.log_group_id)) > 0 ? 
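In the subnets module above, route_table_id and dhcp_options_id fall back to the VCN defaults when the input is empty, pass OCIDs through unchanged, and otherwise resolve a display name against the custom module outputs. A hedged, standalone sketch of that three-way fallback; the local names and IDs below (vcn_default_rt_id, custom_rt_ids) are placeholders, not the module's actual outputs.

variable "route_table_input" {
  # "" or null -> use the VCN default; an OCID -> use as-is; anything else -> treat as a name.
  type    = string
  default = ""
}

locals {
  vcn_default_rt_id = "ocid1.routetable.oc1.phx.defaultexample"        # placeholder
  custom_rt_ids     = { "app-rt" = "ocid1.routetable.oc1.phx.bbbbexample" } # placeholder

  route_table_id = (var.route_table_input == null || var.route_table_input == "") ? local.vcn_default_rt_id : (
    length(regexall("ocid1.routetable.oc*", var.route_table_input)) > 0 ? var.route_table_input : local.custom_rt_ids[var.route_table_input]
  )
}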
each.value.log_group_id : merge(module.vcn-log-groups.*...)[each.value.log_group_id]["log_group_tf_id"] log_type = each.value.log_type #Required diff --git a/cd3_automation_toolkit/user-scripts/terraform/networkloadbalancer.tf b/cd3_automation_toolkit/user-scripts/terraform/networkloadbalancer.tf index 5dbf7e1e6..cdbe7eb59 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/networkloadbalancer.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/networkloadbalancer.tf @@ -8,7 +8,7 @@ data "oci_core_subnets" "oci_subnets_nlb" { # depends_on = [module.subnets] # Uncomment to create Network and NLBs together for_each = var.network_load_balancers != null ? var.network_load_balancers : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_id vcn_id = data.oci_core_vcns.oci_vcns_nlb[each.key].virtual_networks.*.id[0] } @@ -16,25 +16,27 @@ data "oci_core_subnets" "oci_subnets_nlb" { data "oci_core_vcns" "oci_vcns_nlb" { # depends_on = [module.vcns] # Uncomment to create Network and NLBs together for_each = var.network_load_balancers != null ? var.network_load_balancers : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } module "network-load-balancers" { + # depends_on = [module.nsgs] # Uncomment to create NSG and NLBs together source = "./modules/networkloadbalancer/nlb" for_each = var.network_load_balancers != null ? var.network_load_balancers : {} - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name - subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_nlb[each.key].subnets.*.id[0]) : null + subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_nlb[each.key].subnets.*.id[0]) : null is_preserve_source_destination = each.value.is_preserve_source_destination + is_symmetric_hash_enabled = each.value.is_symmetric_hash_enabled is_private = each.value.is_private network_security_group_ids = each.value.nsg_ids nlb_ip_version = each.value.nlb_ip_version vcn_name = each.value.vcn_name defined_tags = each.value.defined_tags freeform_tags = each.value.freeform_tags - reserved_ips_id = each.value.reserved_ips_id != "" && lower(each.value.reserved_ips_id) != "n" ? (length(regexall("ocid1.publicip.oc1*", each.value.reserved_ips_id)) > 0 ? [each.value.reserved_ips_id] : [merge(module.nlb-reserved-ips.*...)[join("-", [each.key, "reserved", "ip"])].reserved_ip_tf_id]) : [] + reserved_ips_id = each.value.reserved_ips_id != "" && lower(each.value.reserved_ips_id) != "n" ? (length(regexall("ocid1.publicip.oc*", each.value.reserved_ips_id)) > 0 ? [each.value.reserved_ips_id] : [merge(module.nlb-reserved-ips.*...)[join("-", [each.key, "reserved", "ip"])].reserved_ip_tf_id]) : [] } module "nlb-listeners" { @@ -42,7 +44,7 @@ module "nlb-listeners" { for_each = var.nlb_listeners != null ? var.nlb_listeners : {} name = each.value.name default_backend_set_name = merge(module.nlb-backend-sets.*...)[each.value.default_backend_set_name].nlb_backend_set_tf_name - network_load_balancer_id = length(regexall("ocid1.networkloadbalancer.oc1*", each.value.network_load_balancer_id)) > 0 ? each.value.network_load_balancer_id : merge(module.network-load-balancers.*...)[each.value.network_load_balancer_id]["network_load_balancer_tf_id"] + network_load_balancer_id = length(regexall("ocid1.networkloadbalancer.oc*", each.value.network_load_balancer_id)) > 0 ? each.value.network_load_balancer_id : merge(module.network-load-balancers.*...)[each.value.network_load_balancer_id]["network_load_balancer_tf_id"] port = each.value.port protocol = each.value.protocol ip_version = each.value.ip_version @@ -52,7 +54,7 @@ module "nlb-backend-sets" { source = "./modules/networkloadbalancer/nlb-backendset" for_each = var.nlb_backend_sets != null ? var.nlb_backend_sets : {} name = each.value.name - network_load_balancer_id = length(regexall("ocid1.networkloadbalancer.oc1*", each.value.network_load_balancer_id)) > 0 ? each.value.network_load_balancer_id : merge(module.network-load-balancers.*...)[each.value.network_load_balancer_id]["network_load_balancer_tf_id"] + network_load_balancer_id = length(regexall("ocid1.networkloadbalancer.oc*", each.value.network_load_balancer_id)) > 0 ? 
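The NLB (and OKE) blocks above resolve a subnet given by display name through a two-step data-source chain: look up the VCN by name in the network compartment, then look up the subnet by name inside that VCN, unless the input is already a subnet OCID. A minimal sketch under assumed inputs; the compartment OCID, VCN name, and subnet name are placeholders, and a configured oci provider is assumed.

variable "subnet_input" {
  type    = string
  default = "app-subnet"   # could also be a full "ocid1.subnet.oc..." OCID
}

data "oci_core_vcns" "example" {
  compartment_id = "ocid1.compartment.oc1..exampleuniqueid"   # placeholder
  display_name   = "app-vcn"                                  # placeholder
}

data "oci_core_subnets" "example" {
  compartment_id = "ocid1.compartment.oc1..exampleuniqueid"   # placeholder
  vcn_id         = data.oci_core_vcns.example.virtual_networks[0].id
  display_name   = var.subnet_input
}

locals {
  # OCID passes through; a name is resolved to the first matching subnet in the VCN.
  subnet_id = length(regexall("ocid1.subnet.oc*", var.subnet_input)) > 0 ? var.subnet_input : data.oci_core_subnets.example.subnets[0].id
}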
each.value.network_load_balancer_id : merge(module.network-load-balancers.*...)[each.value.network_load_balancer_id]["network_load_balancer_tf_id"] policy = each.value.policy ip_version = each.value.ip_version is_preserve_source = each.value.is_preserve_source @@ -60,7 +62,9 @@ module "nlb-backend-sets" { protocol = each.value.protocol interval_in_millis = each.value.interval_in_millis port = each.value.port + request_data = each.value.request_data response_body_regex = each.value.response_body_regex + response_data = each.value.response_data retries = each.value.retries return_code = each.value.return_code timeout_in_millis = each.value.timeout_in_millis @@ -69,16 +73,18 @@ module "nlb-backend-sets" { module "nlb-backends" { source = "./modules/networkloadbalancer/nlb-backend" + # depends_on = [module.instances] # Uncomment to create Network and NLBs together for_each = var.nlb_backends != null ? var.nlb_backends : {} backend_set_name = merge(module.nlb-backend-sets.*...)[each.value.backend_set_name]["nlb_backend_set_tf_name"] - network_load_balancer_id = length(regexall("ocid1.loadbalancer.oc1*", each.value.network_load_balancer_id)) > 0 ? each.value.network_load_balancer_id : merge(module.network-load-balancers.*...)[each.value.network_load_balancer_id]["network_load_balancer_tf_id"] + network_load_balancer_id = length(regexall("ocid1.loadbalancer.oc*", each.value.network_load_balancer_id)) > 0 ? each.value.network_load_balancer_id : merge(module.network-load-balancers.*...)[each.value.network_load_balancer_id]["network_load_balancer_tf_id"] port = each.value.port ip_address = each.value.ip_address - instance_compartment = each.value.instance_compartment != "" ? (length(regexall("ocid1.compartment.oc1*", each.value.instance_compartment)) > 0 ? each.value.instance_compartment : var.compartment_ocids[each.value.instance_compartment]) : var.tenancy_ocid + instance_compartment = each.value.instance_compartment != "" ? (length(regexall("ocid1.compartment.oc*", each.value.instance_compartment)) > 0 ? each.value.instance_compartment : var.compartment_ocids[each.value.instance_compartment]) : var.tenancy_ocid #ip_address = each.value.ip_address != "" ? (length(regexall("IP:", each.value.ip_address)) > 0 ? split("IP:", each.value.ip_address)[1] : data.oci_core_instance.nlb_instance_ip[each.key].private_ip) : (length(regexall("NAME:", each.value.ip_address)) > 0 ? split("NAME:", each.value.ip_address)[1] : data.oci_core_instance.nlb_instance[each.key].private_ip) : null is_drain = each.value.is_drain != "" ? each.value.is_drain : "false" + is_backup = each.value.is_backup != "" ? each.value.is_backup : "false" is_offline = each.value.is_offline != "" ? each.value.is_offline : "false" weight = each.value.weight != "" ? each.value.weight : "1" @@ -99,14 +105,14 @@ module "nlb-reserved-ips" { for_each = var.nlb_reserved_ips != null && var.nlb_reserved_ips != {} ? var.nlb_reserved_ips : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null lifetime = each.value.lifetime #Optional defined_tags = each.value.defined_tags display_name = each.value.display_name freeform_tags = each.value.freeform_tags - #private_ip_id = each.value.private_ip_id != "" ? (length(regexall("ocid1.privateip.oc1*", each.value.private_ip_id)) > 0 ? each.value.private_ip_id : (length(regexall("\\.", each.value.private_ip_id)) == 3 ? local.private_ip_id[0][each.value.private_ip_id] : merge(module.private-ips.*...)[each.value.private_ip_id].private_ip_tf_id)) : null - #public_ip_pool_id = each.value.public_ip_pool_id != "" ? (length(regexall("ocid1.publicippool.oc1*", each.value.public_ip_pool_id)) > 0 ? each.value.public_ip_pool_id : merge(module.public-ip-pools.*...)[each.value.public_ip_pool_id].public_ip_pool_tf_id) : null + #private_ip_id = each.value.private_ip_id != "" ? (length(regexall("ocid1.privateip.oc*", each.value.private_ip_id)) > 0 ? each.value.private_ip_id : (length(regexall("\\.", each.value.private_ip_id)) == 3 ? local.private_ip_id[0][each.value.private_ip_id] : merge(module.private-ips.*...)[each.value.private_ip_id].private_ip_tf_id)) : null + #public_ip_pool_id = each.value.public_ip_pool_id != "" ? (length(regexall("ocid1.publicippool.oc*", each.value.public_ip_pool_id)) > 0 ? each.value.public_ip_pool_id : merge(module.public-ip-pools.*...)[each.value.public_ip_pool_id].public_ip_pool_tf_id) : null } diff --git a/cd3_automation_toolkit/user-scripts/terraform/nsg.tf b/cd3_automation_toolkit/user-scripts/terraform/nsg.tf index 1618314ed..41bf93cc0 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/nsg.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/nsg.tf @@ -6,7 +6,7 @@ data "oci_core_vcns" "oci_vcns_nsgs" { # depends_on = [module.vcns] # Uncomment to create Network and NSGs together for_each = var.nsgs != null ? var.nsgs : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null display_name = each.value.vcn_name } @@ -17,7 +17,7 @@ module "nsgs" { for_each = (var.nsgs != null || var.nsgs != {}) ? var.nsgs : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null vcn_id = flatten(data.oci_core_vcns.oci_vcns_nsgs[each.key].virtual_networks.*.id)[0] defined_tags = each.value.defined_tags display_name = each.value.display_name @@ -36,7 +36,7 @@ module "nsg-rules" { depends_on = [module.nsgs] #Required - nsg_id = length(regexall("ocid1.networksecuritygroup.oc1*", each.value.nsg_id)) > 0 ? each.value.nsg_id : merge(module.nsgs.*...)[each.value.nsg_id]["nsg_tf_id"] + nsg_id = length(regexall("ocid1.networksecuritygroup.oc*", each.value.nsg_id)) > 0 ? 
each.value.nsg_id : merge(module.nsgs.*...)[each.value.nsg_id]["nsg_tf_id"] direction = (each.value.direction == "" && each.value.direction == null) ? "INGRESS" : each.value.direction protocol = each.value.protocol diff --git a/cd3_automation_toolkit/user-scripts/terraform/object-storage.tf b/cd3_automation_toolkit/user-scripts/terraform/object-storage.tf index d2a794390..fcac22999 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/object-storage.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/object-storage.tf @@ -16,7 +16,7 @@ module "oss-policies" { tenancy_ocid = var.tenancy_ocid policy_name = each.value.name - policy_compartment_id = each.value.compartment_id != "root" ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid + policy_compartment_id = each.value.compartment_id != "root" ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.tenancy_ocid policy_description = each.value.policy_description policy_statements = each.value.policy_statements @@ -42,7 +42,7 @@ module "oss-buckets" { for_each = var.buckets != null ? var.buckets : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null name = each.value.name namespace = data.oci_objectstorage_namespace.bucket_namespace.namespace @@ -51,7 +51,7 @@ module "oss-buckets" { auto_tiering = each.value.auto_tiering != "" ? each.value.auto_tiering : null # Defaults to 'Disabled' as per hashicorp terraform defined_tags = each.value.defined_tags != {} ? each.value.defined_tags : {} freeform_tags = each.value.freeform_tags != {} ? each.value.freeform_tags : {} - kms_key_id = each.value.kms_key_id != "" ? each.value.kms_key_id : null + kms_key_id = each.value.kms_key_id != "" ? each.value.kms_key_id : null #metadata = each.value.metadata != {} ? each.value.metadata : {} object_events_enabled = each.value.object_events_enabled != "" ? each.value.object_events_enabled : null # Defaults to 'false' as per hashicorp terraform storage_tier = each.value.storage_tier != "" ? each.value.storage_tier : null # Defaults to 'Standard' as per hashicorp terraform @@ -83,7 +83,7 @@ module "oss-log-groups" { # Log Groups #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name @@ -106,9 +106,9 @@ module "oss-logs" { # Logs #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? 
(length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null display_name = each.value.display_name - log_group_id = length(regexall("ocid1.loggroup.oc1*", each.value.log_group_id)) > 0 ? each.value.log_group_id : merge(module.oss-log-groups.*...)[each.value.log_group_id]["log_group_tf_id"] + log_group_id = length(regexall("ocid1.loggroup.oc*", each.value.log_group_id)) > 0 ? each.value.log_group_id : merge(module.oss-log-groups.*...)[each.value.log_group_id]["log_group_tf_id"] log_type = each.value.log_type #Required diff --git a/cd3_automation_toolkit/user-scripts/terraform/oke.tf b/cd3_automation_toolkit/user-scripts/terraform/oke.tf index 2ce4ebf85..1ad3fd48f 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/oke.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/oke.tf @@ -9,7 +9,7 @@ data "oci_core_subnets" "oci_subnets_endpoint" { # depends_on = [module.subnets] # Uncomment to create Network and OKE together for_each = var.clusters != null ? var.clusters : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.endpoint_subnet_id vcn_id = data.oci_core_vcns.oci_vcns_cluster[each.key].virtual_networks.*.id[0] } @@ -18,7 +18,7 @@ data "oci_core_subnets" "oci_subnets_endpoint" { data "oci_core_subnets" "oci_subnets_worker" { # depends_on = [module.subnets] # Uncomment to create Network and OKE together for_each = var.nodepools != null ? var.nodepools : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.subnet_id vcn_id = data.oci_core_vcns.oci_vcns_nodepool[each.key].virtual_networks.*.id[0] } @@ -26,7 +26,7 @@ data "oci_core_subnets" "oci_subnets_worker" { data "oci_core_subnets" "oci_subnets_pod" { # depends_on = [module.subnets] # Uncomment to create Network and OKE together for_each = var.nodepools != null ? var.nodepools : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? 
each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.pod_subnet_ids vcn_id = data.oci_core_vcns.oci_vcns_nodepool[each.key].virtual_networks.*.id[0] } @@ -34,14 +34,14 @@ data "oci_core_subnets" "oci_subnets_pod" { data "oci_core_vcns" "oci_vcns_cluster" { # depends_on = [module.vcns] # Uncomment to create Network and OKE together for_each = var.clusters != null ? var.clusters : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } data "oci_core_vcns" "oci_vcns_nodepool" { # depends_on = [module.vcns] # Uncomment to create Network and OKE together for_each = var.nodepools != null ? var.nodepools : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } @@ -49,8 +49,8 @@ module "clusters" { source = "./modules/oke/cluster" for_each = var.clusters display_name = each.value.display_name - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id] - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.compartment_id] + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id] + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.compartment_id] vcn_names = [each.value.vcn_name] kubernetes_version = each.value.kubernetes_version is_kubernetes_dashboard_enabled = each.value.is_kubernetes_dashboard_enabled @@ -58,7 +58,7 @@ module "clusters" { cni_type = each.value.cni_type is_public_ip_enabled = each.value.is_public_ip_enabled nsg_ids = each.value.nsg_ids - endpoint_subnet_id = length(regexall("ocid1.subnet.oc1*", each.value.endpoint_subnet_id)) > 0 ? 
each.value.endpoint_subnet_id : data.oci_core_subnets.oci_subnets_endpoint[each.key].subnets.*.id[0] + endpoint_subnet_id = length(regexall("ocid1.subnet.oc*", each.value.endpoint_subnet_id)) > 0 ? each.value.endpoint_subnet_id : data.oci_core_subnets.oci_subnets_endpoint[each.key].subnets.*.id[0] is_pod_security_policy_enabled = each.value.is_pod_security_policy_enabled pods_cidr = each.value.pods_cidr services_cidr = each.value.services_cidr @@ -74,24 +74,24 @@ module "nodepools" { tenancy_ocid = var.tenancy_ocid display_name = each.value.display_name availability_domain = each.value.availability_domain - cluster_name = length(regexall("ocid1.cluster.oc1*", each.value.cluster_name)) > 0 ? each.value.cluster_name : merge(module.clusters.*...)[each.value.cluster_name]["cluster_tf_id"] - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + cluster_name = length(regexall("ocid1.cluster.oc*", each.value.cluster_name)) > 0 ? each.value.cluster_name : merge(module.clusters.*...)[each.value.cluster_name]["cluster_tf_id"] + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null vcn_names = [each.value.vcn_name] node_shape = each.value.node_shape initial_node_labels = each.value.initial_node_labels kubernetes_version = each.value.kubernetes_version - subnet_id = length(regexall("ocid1.subnet.oc1*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_worker[each.key].subnets.*.id[0] + subnet_id = length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.oci_subnets_worker[each.key].subnets.*.id[0] size = each.value.size is_pv_encryption_in_transit_enabled = each.value.is_pv_encryption_in_transit_enabled cni_type = each.value.cni_type max_pods_per_node = each.value.max_pods_per_node pod_nsg_ids = each.value.pod_nsg_ids - pod_subnet_ids = each.value.pod_subnet_ids != null ? (length(regexall("ocid1.subnet.oc1*", each.value.pod_subnet_ids)) > 0 ? each.value.pod_subnet_ids : data.oci_core_subnets.oci_subnets_pod[each.key].subnets.*.id[0]) : null + pod_subnet_ids = each.value.pod_subnet_ids != null ? (length(regexall("ocid1.subnet.oc*", each.value.pod_subnet_ids)) > 0 ? each.value.pod_subnet_ids : data.oci_core_subnets.oci_subnets_pod[each.key].subnets.*.id[0]) : null worker_nsg_ids = each.value.worker_nsg_ids memory_in_gbs = each.value.memory_in_gbs ocpus = each.value.ocpus - image_id = length(regexall("ocid1.image.oc1*", each.value.image_id)) > 0 ? each.value.image_id : var.oke_source_ocids[each.value.image_id] + image_id = length(regexall("ocid1.image.oc*", each.value.image_id)) > 0 ? 
each.value.image_id : var.oke_source_ocids[each.value.image_id] source_type = each.value.source_type boot_volume_size_in_gbs = each.value.boot_volume_size_in_gbs ssh_public_key = each.value.ssh_public_key != null ? (length(regexall("ssh-rsa*", each.value.ssh_public_key)) > 0 ? each.value.ssh_public_key : lookup(var.oke_ssh_keys, each.value.ssh_public_key, null)) : null diff --git a/cd3_automation_toolkit/user-scripts/terraform/provider.tf b/cd3_automation_toolkit/user-scripts/terraform/provider.tf index fed8d4788..3fe96d1bf 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/provider.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/provider.tf @@ -19,7 +19,7 @@ terraform { oci = { source = "oracle/oci" #version = ">= 4.0.0" - version = "5.31.0" + version = "5.35.0" } } } diff --git a/cd3_automation_toolkit/user-scripts/terraform/sddc.tf b/cd3_automation_toolkit/user-scripts/terraform/sddc.tf index 8ffeffb78..a0ac66449 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/sddc.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/sddc.tf @@ -8,67 +8,67 @@ locals { vlan_config = flatten([for index in local.vlan_ids : [ for key, val in var.sddcs : { #(index) = lookup(val, index, 0) - compartment_id = val.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", val.network_compartment_id)) > 0 ? val.network_compartment_id : var.compartment_ocids[val.network_compartment_id]) : null + compartment_id = val.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", val.network_compartment_id)) > 0 ? val.network_compartment_id : var.compartment_ocids[val.network_compartment_id]) : null display_name = lookup(val, index, 0) vcn_id = data.oci_core_vcns.oci_vcns_sddc[key].virtual_networks.*.id[0] } ]]) - ds_vols = flatten([ for key, val in var.sddcs : [ - for item in concat(local.mgmt_vols[val.display_name],local.wkld_vols[val.display_name]): { - volume_display_name = item.volume_display_name - volume_compartment_id = item.volume_compartment_id - } + ds_vols = flatten([for key, val in var.sddcs : [ + for item in concat(local.mgmt_vols[val.display_name], local.wkld_vols[val.display_name]) : { + volume_display_name = item.volume_display_name + volume_compartment_id = item.volume_compartment_id + } ]]) mgmt_vols = { for key, val in var.sddcs : - val.display_name => try([for item in val.management_datastore: { - volume_compartment_id = try(split("@", item)[0],null) - volume_display_name = try(split("@", item)[1],null) - }],[])} + val.display_name => try([for item in val.management_datastore : { + volume_compartment_id = try(split("@", item)[0], null) + volume_display_name = try(split("@", item)[1], null) + }], []) } wkld_vols = { for key, val in var.sddcs : - val.display_name => try([ for item in val.workload_datastore: - { - volume_compartment_id = try(split("@", item)[0],null) - volume_display_name = try(split("@", item)[1],null) - }] ,[])} + val.display_name => try([for item in val.workload_datastore : + { + volume_compartment_id = try(split("@", item)[0], null) + volume_display_name = try(split("@", item)[1], null) + }], []) } - management_datastores = { for key,val in var.sddcs : key => (val.management_datastore != null ? [for value in val.management_datastore: data.oci_core_volumes.ds_volumes[split("@", value)[1]].volumes.*.id[0]] : []) + management_datastores = { for key, val in var.sddcs : key => (val.management_datastore != null ? 
[for value in val.management_datastore : data.oci_core_volumes.ds_volumes[split("@", value)[1]].volumes.*.id[0]] : []) } - workload_datastores = {for key,val in var.sddcs: key => (val.workload_datastore != null ? [for value in val.workload_datastore: data.oci_core_volumes.ds_volumes[split("@", value)[1]].volumes.*.id[0]] : []) - } - } + workload_datastores = { for key, val in var.sddcs : key => (val.workload_datastore != null ? [for value in val.workload_datastore : data.oci_core_volumes.ds_volumes[split("@", value)[1]].volumes.*.id[0]] : []) + } +} data "oci_core_volumes" "ds_volumes" { - for_each = {for value in local.ds_vols : value.volume_display_name => value.volume_compartment_id if value.volume_display_name != null } - compartment_id = each.value != null ? (length(regexall("ocid1.compartment.oc1*", each.value)) > 0 ? each.value : var.compartment_ocids[each.value]) : var.compartment_ocids[each.value] - display_name = each.key - state = "AVAILABLE" + for_each = { for value in local.ds_vols : value.volume_display_name => value.volume_compartment_id if value.volume_display_name != null } + compartment_id = each.value != null ? (length(regexall("ocid1.compartment.oc*", each.value)) > 0 ? each.value : var.compartment_ocids[each.value]) : var.compartment_ocids[each.value] + display_name = each.key + state = "AVAILABLE" } data "oci_core_vcns" "oci_vcns_sddc" { - # depends_on = [module.vcns] # Uncomment to create Network and Instances together + # depends_on = [module.vcns] # Uncomment to create Network and SDDC together for_each = var.sddcs != null ? var.sddcs : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.vcn_name } data "oci_core_subnets" "oci_subnets_sddc" { - # depends_on = [module.subnets] # Uncomment to create Network and Instances together + # depends_on = [module.subnets] # Uncomment to create Network and SDDC together for_each = var.sddcs != null ? var.sddcs : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? 
each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.provisioning_subnet_id vcn_id = data.oci_core_vcns.oci_vcns_sddc[each.key].virtual_networks.*.id[0] } data "oci_core_vlans" "sddc_vlan_id" { #Required - for_each = { for vlan in local.vlan_config : vlan.display_name => vlan if vlan.display_name != null} + for_each = { for vlan in local.vlan_config : vlan.display_name => vlan if vlan.display_name != null } compartment_id = each.value.compartment_id display_name = each.key vcn_id = each.value.vcn_id @@ -76,23 +76,23 @@ data "oci_core_vlans" "sddc_vlan_id" { module "sddcs" { - #depends_on = [module.vlans] + # depends_on = [module.vlans] # Uncomment to create Network and SDDC together source = "./modules/sddc" for_each = var.sddcs != null ? var.sddcs : {} - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null compute_availability_domain = each.value.availability_domain == "multi-AD" ? each.value.availability_domain : (each.value.availability_domain != "" && each.value.availability_domain != null) ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" esxi_hosts_count = each.value.esxi_hosts_count != "" ? each.value.esxi_hosts_count : null - nsx_edge_uplink1vlan_id = each.value.nsx_edge_uplink1vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.nsx_edge_uplink1vlan_id)) > 0 ? each.value.nsx_edge_uplink1vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_edge_uplink1vlan_id].vlans[0].id) : null - nsx_edge_uplink2vlan_id = each.value.nsx_edge_uplink2vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.nsx_edge_uplink2vlan_id)) > 0 ? each.value.nsx_edge_uplink2vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_edge_uplink2vlan_id].vlans[0].id) : null - nsx_edge_vtep_vlan_id = each.value.nsx_edge_vtep_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.nsx_edge_vtep_vlan_id)) > 0 ? each.value.nsx_edge_vtep_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_edge_vtep_vlan_id].vlans[0].id) : null - nsx_vtep_vlan_id = each.value.nsx_vtep_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.nsx_vtep_vlan_id)) > 0 ? each.value.nsx_vtep_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_vtep_vlan_id].vlans[0].id) : null - provisioning_subnet_id = each.value.provisioning_subnet_id != "" ? (length(regexall("ocid1.subnet.oc1*", each.value.provisioning_subnet_id)) > 0 ? 
each.value.provisioning_subnet_id : data.oci_core_subnets.oci_subnets_sddc[each.key].subnets.*.id[0]) : null + nsx_edge_uplink1vlan_id = each.value.nsx_edge_uplink1vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.nsx_edge_uplink1vlan_id)) > 0 ? each.value.nsx_edge_uplink1vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_edge_uplink1vlan_id].vlans[0].id) : null + nsx_edge_uplink2vlan_id = each.value.nsx_edge_uplink2vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.nsx_edge_uplink2vlan_id)) > 0 ? each.value.nsx_edge_uplink2vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_edge_uplink2vlan_id].vlans[0].id) : null + nsx_edge_vtep_vlan_id = each.value.nsx_edge_vtep_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.nsx_edge_vtep_vlan_id)) > 0 ? each.value.nsx_edge_vtep_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_edge_vtep_vlan_id].vlans[0].id) : null + nsx_vtep_vlan_id = each.value.nsx_vtep_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.nsx_vtep_vlan_id)) > 0 ? each.value.nsx_vtep_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.nsx_vtep_vlan_id].vlans[0].id) : null + provisioning_subnet_id = each.value.provisioning_subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.provisioning_subnet_id)) > 0 ? each.value.provisioning_subnet_id : data.oci_core_subnets.oci_subnets_sddc[each.key].subnets.*.id[0]) : null ssh_authorized_keys = each.value.ssh_authorized_keys != null ? (length(regexall("ssh-rsa*", each.value.ssh_authorized_keys)) > 0 ? each.value.ssh_authorized_keys : lookup(var.sddc_ssh_keys, each.value.ssh_authorized_keys, null)) : null - vmotion_vlan_id = each.value.vmotion_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.vmotion_vlan_id)) > 0 ? each.value.vmotion_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.vmotion_vlan_id].vlans[0].id) : null + vmotion_vlan_id = each.value.vmotion_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.vmotion_vlan_id)) > 0 ? each.value.vmotion_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.vmotion_vlan_id].vlans[0].id) : null vmware_software_version = each.value.vmware_software_version != "" ? each.value.vmware_software_version : null - vsan_vlan_id = each.value.vsan_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.vsan_vlan_id)) > 0 ? each.value.vsan_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.vsan_vlan_id].vlans[0].id) : null - vsphere_vlan_id = each.value.vsphere_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.vsphere_vlan_id)) > 0 ? each.value.vsphere_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.vsphere_vlan_id].vlans[0].id) : null + vsan_vlan_id = each.value.vsan_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.vsan_vlan_id)) > 0 ? each.value.vsan_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.vsan_vlan_id].vlans[0].id) : null + vsphere_vlan_id = each.value.vsphere_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.vsphere_vlan_id)) > 0 ? each.value.vsphere_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.vsphere_vlan_id].vlans[0].id) : null #Optional initial_host_ocpu_count = each.value.initial_host_ocpu_count != "" ? each.value.initial_host_ocpu_count : null initial_host_shape_name = each.value.initial_host_shape_name != "" ? each.value.initial_host_shape_name : null @@ -101,15 +101,15 @@ module "sddcs" { defined_tags = each.value.defined_tags != {} ? 
each.value.defined_tags : {} freeform_tags = each.value.freeform_tags != {} ? each.value.freeform_tags : {} hcx_action = each.value.hcx_action != "" ? each.value.hcx_action : null - hcx_vlan_id = each.value.hcx_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.hcx_vlan_id)) > 0 ? each.value.hcx_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.hcx_vlan_id].vlans[0].id) : null + hcx_vlan_id = each.value.hcx_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.hcx_vlan_id)) > 0 ? each.value.hcx_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.hcx_vlan_id].vlans[0].id) : null initial_sku = each.value.initial_sku != "" ? each.value.initial_sku : null instance_display_name_prefix = each.value.instance_display_name_prefix != "" ? each.value.instance_display_name_prefix : null is_hcx_enabled = each.value.is_hcx_enabled != "" ? each.value.is_hcx_enabled : null is_shielded_instance_enabled = each.value.is_shielded_instance_enabled != "" ? each.value.is_shielded_instance_enabled : null is_single_host_sddc = each.value.is_single_host_sddc != "" ? each.value.is_single_host_sddc : null - provisioning_vlan_id = each.value.provisioning_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.provisioning_vlan_id)) > 0 ? each.value.provisioning_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.provisioning_vlan_id].vlans[0].id) : null + provisioning_vlan_id = each.value.provisioning_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.provisioning_vlan_id)) > 0 ? each.value.provisioning_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.provisioning_vlan_id].vlans[0].id) : null refresh_hcx_license_status = each.value.refresh_hcx_license_status != "" ? each.value.refresh_hcx_license_status : null - replication_vlan_id = each.value.replication_vlan_id != null ? (length(regexall("ocid1.vlan.oc1*", each.value.replication_vlan_id)) > 0 ? each.value.replication_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.replication_vlan_id].vlans[0].id) : null + replication_vlan_id = each.value.replication_vlan_id != null ? (length(regexall("ocid1.vlan.oc*", each.value.replication_vlan_id)) > 0 ? each.value.replication_vlan_id : data.oci_core_vlans.sddc_vlan_id[each.value.replication_vlan_id].vlans[0].id) : null reserving_hcx_on_premise_license_keys = each.value.reserving_hcx_on_premise_license_keys != "" ? each.value.reserving_hcx_on_premise_license_keys : null workload_network_cidr = each.value.workload_network_cidr != "" ? each.value.workload_network_cidr : null management_datastore = local.management_datastores[each.key] != null ? local.management_datastores[each.key] : [] diff --git a/cd3_automation_toolkit/user-scripts/terraform/tagging.tf b/cd3_automation_toolkit/user-scripts/terraform/tagging.tf index 9fde70ffc..6e5008092 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/tagging.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/tagging.tf @@ -10,7 +10,7 @@ module "tag-namespaces" { for_each = (var.tag_namespaces != null || var.tag_namespaces != {}) ? var.tag_namespaces : {} #Required - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? 
each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null description = each.value.description != "" ? each.value.description : each.value.name name = each.value.name @@ -26,7 +26,7 @@ module "tag-keys" { for_each = (var.tag_keys != null || var.tag_keys != {}) ? var.tag_keys : {} #Required - tag_namespace_id = length(regexall("ocid1.tagnamespace.oc1*", each.value.tag_namespace_id)) > 0 ? each.value.tag_namespace_id : merge(module.tag-namespaces.*...)[each.value.tag_namespace_id]["namespace_tf_id"] + tag_namespace_id = length(regexall("ocid1.tagnamespace.oc*", each.value.tag_namespace_id)) > 0 ? each.value.tag_namespace_id : merge(module.tag-namespaces.*...)[each.value.tag_namespace_id]["namespace_tf_id"] description = each.value.description != "" ? each.value.description : each.value.name name = each.value.name @@ -44,8 +44,8 @@ module "tag-defaults" { for_each = (var.tag_defaults != null || var.tag_defaults != {}) ? var.tag_defaults : {} #Required - compartment_id = length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : try(zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.compartment_id], var.compartment_ocids[each.value.compartment_id]) - tag_definition_id = length(regexall("ocid1.tagdefinition.oc1*", each.value.tag_definition_id)) > 0 ? each.value.tag_definition_id : merge(module.tag-keys.*...)[each.value.tag_definition_id]["tag_key_tf_id"] + compartment_id = length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : try(zipmap(data.oci_identity_compartments.compartments.compartments.*.name, data.oci_identity_compartments.compartments.compartments.*.id)[each.value.compartment_id], var.compartment_ocids[each.value.compartment_id]) + tag_definition_id = length(regexall("ocid1.tagdefinition.oc*", each.value.tag_definition_id)) > 0 ? 
each.value.tag_definition_id : merge(module.tag-keys.*...)[each.value.tag_definition_id]["tag_key_tf_id"] value = each.value.value #Optional diff --git a/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf b/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf index e5a009cf9..a4007bb15 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf @@ -609,73 +609,73 @@ variable "data_drg_route_table_distributions" { #################### variable "zones" { -type = map(object({ -compartment_id = string -display_name = string -view_compartment_id = optional(string) -view_id = optional(string) -zone_type = optional(string) -scope = optional(string) -freeform_tags = optional(map(any)) -defined_tags = optional(map(any)) -})) -default = {} + type = map(object({ + compartment_id = string + display_name = string + view_compartment_id = optional(string) + view_id = optional(string) + zone_type = optional(string) + scope = optional(string) + freeform_tags = optional(map(any)) + defined_tags = optional(map(any)) + })) + default = {} } variable "views" { -type = map(object({ -compartment_id = string -display_name = string -scope = optional(string) -freeform_tags = optional(map(any)) -defined_tags = optional(map(any)) -})) + type = map(object({ + compartment_id = string + display_name = string + scope = optional(string) + freeform_tags = optional(map(any)) + defined_tags = optional(map(any)) + })) default = {} } variable "rrsets" { -type = map(object({ -compartment_id = optional(string) -view_compartment_id = optional(string) -view_id = optional(string) -zone_id = string -domain = string -rtype = string -ttl = number -rdata = optional(list(string)) -scope = optional(string) -})) -default = {} + type = map(object({ + compartment_id = optional(string) + view_compartment_id = optional(string) + view_id = optional(string) + zone_id = string + domain = string + rtype = string + ttl = number + rdata = optional(list(string)) + scope = optional(string) + })) + default = {} } variable "resolvers" { -type = map(object({ -network_compartment_id= string -vcn_name = string -display_name = optional(string) -views = optional(map(object({ - view_id = optional(string) - view_compartment_id = optional(string) -}))) -resolver_rules = optional(map(object({ - client_address_conditions = optional(list(any)) - destination_addresses = optional(list(any)) - qname_cover_conditions = optional(list(any)) - source_endpoint_name = optional(string) -}))) -endpoint_names = optional(map(object({ - is_forwarding = optional(bool) - is_listening = optional(bool) - name = optional(string) - subnet_name = optional(string) - forwarding_address = optional(string) - listening_address = optional(string) - nsg_ids = optional(list(string)) -}))) -freeform_tags = optional(map(any)) -defined_tags = optional(map(any)) -})) -default = {} + type = map(object({ + network_compartment_id = string + vcn_name = string + display_name = optional(string) + views = optional(map(object({ + view_id = optional(string) + view_compartment_id = optional(string) + }))) + resolver_rules = optional(map(object({ + client_address_conditions = optional(list(any)) + destination_addresses = optional(list(any)) + qname_cover_conditions = optional(list(any)) + source_endpoint_name = optional(string) + }))) + endpoint_names = optional(map(object({ + is_forwarding = optional(bool) + is_listening = optional(bool) + name = optional(string) + subnet_name = 
optional(string) + forwarding_address = optional(string) + listening_address = optional(string) + nsg_ids = optional(list(string)) + }))) + freeform_tags = optional(map(any)) + defined_tags = optional(map(any)) + })) + default = {} } @@ -765,32 +765,32 @@ variable "instances" { policy_compartment_id = optional(string) network_type = optional(string) #extended_metadata = optional(string) - skip_source_dest_check = optional(bool) - baseline_ocpu_utilization = optional(string) + skip_source_dest_check = optional(bool) + baseline_ocpu_utilization = optional(string) #preemptible_instance_config = optional(string) - all_plugins_disabled = optional(bool) - is_management_disabled = optional(bool) - is_monitoring_disabled = optional(bool) - assign_private_dns_record = optional(string) - plugins_details = optional(map(any)) - is_live_migration_preferred = optional(bool) - recovery_action = optional(string) - are_legacy_imds_endpoints_disabled = optional(bool) - boot_volume_type = optional(string) - firmware = optional(string) - is_consistent_volume_naming_enabled = optional(bool) - remote_data_volume_type = optional(string) - platform_config = optional(list(map(any))) - launch_options = optional(list(map(any))) - ipxe_script = optional(string) - preserve_boot_volume = optional(bool) - vlan_id = optional(string) - kms_key_id = optional(string) - vnic_display_name = optional(string) - vnic_defined_tags = optional(map(any)) - vnic_freeform_tags = optional(map(any)) - defined_tags = optional(map(any)) - freeform_tags = optional(map(any)) + all_plugins_disabled = optional(bool) + is_management_disabled = optional(bool) + is_monitoring_disabled = optional(bool) + assign_private_dns_record = optional(string) + plugins_details = optional(map(any)) + is_live_migration_preferred = optional(bool) + recovery_action = optional(string) + are_legacy_imds_endpoints_disabled = optional(bool) + boot_volume_type = optional(string) + firmware = optional(string) + is_consistent_volume_naming_enabled = optional(bool) + remote_data_volume_type = optional(string) + platform_config = optional(list(map(any))) + launch_options = optional(list(map(any))) + ipxe_script = optional(string) + preserve_boot_volume = optional(bool) + vlan_id = optional(string) + kms_key_id = optional(string) + vnic_display_name = optional(string) + vnic_defined_tags = optional(map(any)) + vnic_freeform_tags = optional(map(any)) + defined_tags = optional(map(any)) + freeform_tags = optional(map(any)) })) default = {} } @@ -979,6 +979,7 @@ variable "backend_sets" { policy = string protocol = optional(string) interval_ms = optional(string) + is_force_plain_text = optional(string) port = optional(string) response_body_regex = optional(string) retries = optional(string) @@ -1172,6 +1173,7 @@ variable "network_load_balancers" { is_private = optional(bool) reserved_ips_id = string is_preserve_source_destination = optional(bool) + is_symmetric_hash_enabled = optional(bool) nlb_ip_version = optional(string) nsg_ids = optional(list(string)) defined_tags = optional(map(any)) @@ -1200,7 +1202,9 @@ variable "nlb_backend_sets" { return_code = optional(number) interval_in_millis = optional(number) port = optional(number) + request_data = optional(string) response_body_regex = optional(string) + response_data = optional(string) retries = optional(number) timeout_in_millis = optional(number) url_path = optional(string) @@ -1218,6 +1222,7 @@ variable "nlb_backends" { ip_address = string instance_compartment = string is_drain = optional(bool) + is_backup = 
optional(bool) is_offline = optional(bool) weight = optional(number) target_id = optional(string) @@ -1659,34 +1664,34 @@ variable "capacity_reservation_ocids" { ####### Firewall as a Service ####### ##################################### variable "firewalls" { - type = map(object({ - compartment_id = string - network_compartment_id = string + type = map(object({ + compartment_id = string + network_compartment_id = string network_firewall_policy_id = string - subnet_id = string - vcn_name = string - display_name = string - ipv4address = optional(string) - nsg_id = optional(list(string)) - ipv6address = optional(string) - availability_domain = optional(string) - defined_tags = optional(map(any)) - freeform_tags = optional(map(any)) + subnet_id = string + vcn_name = string + display_name = string + ipv4address = optional(string) + nsg_id = optional(list(string)) + ipv6address = optional(string) + availability_domain = optional(string) + defined_tags = optional(map(any)) + freeform_tags = optional(map(any)) })) default = {} } variable "fw-policies" { - type = map(object({ + type = map(object({ compartment_id = optional(string) display_name = optional(string) - defined_tags = optional(map(any)) - freeform_tags = optional(map(any)) + defined_tags = optional(map(any)) + freeform_tags = optional(map(any)) })) default = {} } variable "services" { - type = map(object({ + type = map(object({ service_name = string service_type = string network_firewall_policy_id = string @@ -1698,18 +1703,18 @@ variable "services" { default = {} } variable "url_lists" { - type = map(object({ + type = map(object({ urllist_name = string network_firewall_policy_id = string urls = list(object({ pattern = string - type = string + type = string })) })) default = {} } variable "service_lists" { - type = map(object({ + type = map(object({ service_list_name = string network_firewall_policy_id = string services = list(string) @@ -1718,40 +1723,40 @@ variable "service_lists" { } variable "address_lists" { - type = map(object({ + type = map(object({ address_list_name = string network_firewall_policy_id = string - address_type = string - addresses = list(string) + address_type = string + addresses = list(string) })) default = {} } variable "applications" { - type = map(object({ - app_list_name = string + type = map(object({ + app_list_name = string network_firewall_policy_id = string - app_type = string - icmp_type = number - icmp_code = optional(number) + app_type = string + icmp_type = number + icmp_code = optional(number) })) default = {} } variable "application_groups" { - type = map(object({ - app_group_name = string + type = map(object({ + app_group_name = string network_firewall_policy_id = string - apps = list(string) + apps = list(string) })) default = {} } variable "security_rules" { - type = map(object({ - action = string - rule_name = string + type = map(object({ + action = string + rule_name = string network_firewall_policy_id = string condition = optional(list(object({ application = optional(list(string)) @@ -1760,8 +1765,8 @@ variable "security_rules" { source_address = optional(list(string)) url = optional(list(string)) }))) - inspection = optional(string) - after_rule = optional(string) + inspection = optional(string) + after_rule = optional(string) before_rule = optional(string) })) @@ -1769,33 +1774,33 @@ variable "security_rules" { } variable "secrets" { - type = map(object({ - secret_name = string + type = map(object({ + secret_name = string network_firewall_policy_id = string - secret_source = string - 
secret_type = string - vault_secret_id = string - version_number = number - vault_name = string - vault_compartment_id = string + secret_source = string + secret_type = string + vault_secret_id = string + version_number = number + vault_name = string + vault_compartment_id = string })) default = {} } variable "decryption_profiles" { - type = map(object({ - profile_name = string - profile_type = string - network_firewall_policy_id = string + type = map(object({ + profile_name = string + profile_type = string + network_firewall_policy_id = string are_certificate_extensions_restricted = optional(bool) - is_auto_include_alt_name = optional(bool) - is_expired_certificate_blocked = optional(bool) - is_out_of_capacity_blocked = optional(bool) - is_revocation_status_timeout_blocked = optional(bool) - is_unknown_revocation_status_blocked = optional(bool) - is_unsupported_cipher_blocked = optional(bool) - is_unsupported_version_blocked = optional(bool) - is_untrusted_issuer_blocked = optional(bool) + is_auto_include_alt_name = optional(bool) + is_expired_certificate_blocked = optional(bool) + is_out_of_capacity_blocked = optional(bool) + is_revocation_status_timeout_blocked = optional(bool) + is_unknown_revocation_status_blocked = optional(bool) + is_unsupported_cipher_blocked = optional(bool) + is_unsupported_version_blocked = optional(bool) + is_untrusted_issuer_blocked = optional(bool) })) default = {} } @@ -1805,7 +1810,7 @@ variable "decryption_rules" { action = string rule_name = string network_firewall_policy_id = string - condition = optional(list(object({ + condition = optional(list(object({ destination_address = optional(list(string)) diff --git a/cd3_automation_toolkit/user-scripts/terraform/vlan.tf b/cd3_automation_toolkit/user-scripts/terraform/vlan.tf index 0fea1784d..ec9f98c7f 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/vlan.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/vlan.tf @@ -6,14 +6,14 @@ data "oci_core_route_tables" "oci_route_tables_vlans" { # depends_on = [module.route-tables] #Uncomment this if using single outdir for Network and VLANs for_each = var.vlans != null ? var.vlans : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] display_name = each.value.route_table_name vcn_id = data.oci_core_vcns.oci_vcns_vlans[each.key].virtual_networks.*.id[0] } data "oci_core_vcns" "oci_vcns_vlans" { for_each = var.vlans != null ? var.vlans : {} - compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? 
each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null display_name = each.value.vcn_name } @@ -21,14 +21,14 @@ module "vlans" { source = "./modules/network/vlan" for_each = var.vlans != null ? var.vlans : {} - compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null - network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc1*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null cidr_block = each.value.cidr_block != "" ? each.value.cidr_block : null vcn_id = flatten(data.oci_core_vcns.oci_vcns_vlans[each.key].virtual_networks.*.id)[0] display_name = each.value.display_name != "" ? each.value.display_name : null nsg_ids = each.value.nsg_ids - route_table_id = each.value.route_table_name != null ? (length(regexall("ocid1.routeteable.oc1*", each.value.route_table_name)) > 0 ? each.value.route_table_name : data.oci_core_route_tables.oci_route_tables_vlans[each.key].route_tables.*.id[0]) : null - #route_table_id = each.value.route_table_name != null ? (length(regexall("ocid1.routeteable.oc1*", each.value.route_table_name)) > 0 ? each.value.route_table_name : try(data.oci_core_route_tables.oci_route_tables_vlans[each.key].route_tables.*.id[0], module.route-tables["${each.value.vcn_name}_${each.value.route_table_name}"]["route_table_ids"])): null + route_table_id = each.value.route_table_name != null ? (length(regexall("ocid1.routeteable.oc*", each.value.route_table_name)) > 0 ? each.value.route_table_name : data.oci_core_route_tables.oci_route_tables_vlans[each.key].route_tables.*.id[0]) : null + #route_table_id = each.value.route_table_name != null ? (length(regexall("ocid1.routeteable.oc*", each.value.route_table_name)) > 0 ? each.value.route_table_name : try(data.oci_core_route_tables.oci_route_tables_vlans[each.key].route_tables.*.id[0], module.route-tables["${each.value.vcn_name}_${each.value.route_table_name}"]["route_table_ids"])): null vlan_tag = each.value.vlan_tag != "" ? each.value.vlan_tag : null availability_domain = each.value.availability_domain != "" && each.value.availability_domain != null ? 
data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.availability_domain].name : "" defined_tags = each.value.defined_tags diff --git a/jenkins_install/init/01_jenkins-config.groovy b/jenkins_install/init/01_jenkins-config.groovy index 6fd5e42c0..540a178e5 100644 --- a/jenkins_install/init/01_jenkins-config.groovy +++ b/jenkins_install/init/01_jenkins-config.groovy @@ -1,10 +1,9 @@ -import com.cloudbees.hudson.plugins.folder.* -//import org.jenkinsci.plugins.workflow.job.WorkflowJob import jenkins.model.Jenkins +import com.cloudbees.hudson.plugins.folder.* + Jenkins jenkins = Jenkins.instance def JENKINS_HOME = System.getenv("JENKINS_HOME") - File file = new File("$JENKINS_HOME/jenkins.properties") file.withReader { reader -> while ((line = reader.readLine()) != null) { @@ -21,223 +20,72 @@ file.withReader { reader -> services = Eval.me(line.split("=")[1]) } } - } + } + def tfApplyJobName = "terraform-apply" def tfDestroyJobName = "terraform-destroy" -for (os in outdir_structure) { - - def ost = jenkins.getItem("terraform_files") - if (ost == null) { - ost = jenkins.createProject(Folder.class,"terraform_files") - - def global = ost.getItem("global") - if (global == null) { - global = ost.createProject(Folder.class, "global") - - def rpc = global.getItem("rpc") - if (rpc == null) { - rpc = global.createProject(Folder.class, "rpc") - - def tfGlobRpcXml = -"""\ - +// Function to create job XML +def createJobXml(scriptPath, gitUrl) { + return """ + false - multiOutput.groovy - false - - - - ${git_url} - - - - - develop - - - 2 - false - Default - + ${scriptPath} + false + + + + ${gitUrl} + + + + + develop + + + 2 + false + Default + - -""" + + """ +} - def tfGlobRpcDestroyXml = -"""\ - - - - false - - multiOutput-tf-destroy.groovy - false - - - - ${git_url} - - - - - develop - - - 2 - false - Default - - - -""" - def tfGlobRpcXmlStream = new ByteArrayInputStream(tfGlobRpcXml.getBytes()) - job1 = rpc.createProjectFromXML(tfApplyJobName, tfGlobRpcXmlStream) - def tfGlobRpcDestroyXmlStream = new ByteArrayInputStream(tfGlobRpcDestroyXml.getBytes()) - job2 = rpc.createProjectFromXML(tfDestroyJobName, tfGlobRpcDestroyXmlStream) - - } -} - - for (reg in regions) { - def folder = ost.getItem(reg) - if (folder == null) { - folder = ost.createProject(Folder.class, reg) - if (os == "Single_Outdir"){ - def tfApplyXml = -"""\ - - - - false - - singleOutput.groovy - false - - - - ${git_url} - - - - - develop - - - 2 - false - Default - - - -""" +// Function to create Jenkins job +def createJob(parent, jobName, xml) { + def jobXmlStream = new ByteArrayInputStream(xml.getBytes()) + parent.createProjectFromXML(jobName, jobXmlStream) +} - def tfDestroyXml = -"""\ - - - - false - - singleOutput-tf-destroy.groovy - false - - - - ${git_url} - - - - - develop - - - 2 - false - Default - - - -""" +// Create jobs for each configuration +jenkins.with { + Folder ost = getItem("terraform_files") ?: createProject(Folder.class, "terraform_files") - def tfApplyxmlStream = new ByteArrayInputStream(tfApplyXml.getBytes()) - job1 = folder.createProjectFromXML(tfApplyJobName, tfApplyxmlStream) - def tfDestroyxmlStream = new ByteArrayInputStream(tfDestroyXml.getBytes()) - job2 = folder.createProjectFromXML(tfDestroyJobName, tfDestroyxmlStream) + for (os in outdir_structure) { + Folder global = ost.getItem("global") ?: ost.createProject(Folder.class, "global") + Folder rpc = global.getItem("rpc") ?: global.createProject(Folder.class, "rpc") - } - if (os == "Multiple_Outdir"){ - for (svc in 
services) { - def svobjt = folder.getItem(svc) - if (svobjt == null) { - svobjt = folder.createProject(Folder.class, svc) - def tfApplyXml = -"""\ - - - - false - - multiOutput.groovy - false - - - - ${git_url} - - - - - develop - - - 2 - false - Default - - - -""" + createJob(rpc, tfApplyJobName, createJobXml('tf-apply.groovy', git_url)) + createJob(rpc, tfDestroyJobName, createJobXml('tf-destroy.groovy', git_url)) + for (reg in regions) { + Folder folder = ost.getItem(reg) ?: ost.createProject(Folder.class, reg) - def tfDestroyXml = -"""\ - - - - false - - multiOutput-tf-destroy.groovy - false - - - - ${git_url} - - - - - develop - - - 2 - false - Default - - - -""" - def tfApplyxmlStream = new ByteArrayInputStream(tfApplyXml.getBytes()) - job1 = svobjt.createProjectFromXML(tfApplyJobName, tfApplyxmlStream) - def tfDestroyxmlStream = new ByteArrayInputStream(tfDestroyXml.getBytes()) - job2 = svobjt.createProjectFromXML(tfDestroyJobName, tfDestroyxmlStream) - } - } - } - } + if (os == "Single_Outdir") { + createJob(folder, tfApplyJobName, createJobXml('tf-apply.groovy', git_url)) + createJob(folder, tfDestroyJobName, createJobXml('tf-destroy.groovy', git_url)) } - } -} \ No newline at end of file + if (os == "Multiple_Outdir" && services) { + for (svc in services) { + Folder svcFolder = folder.getItem(svc) ?: folder.createProject(Folder.class, svc) + createJob(svcFolder, tfApplyJobName, createJobXml('tf-apply.groovy', git_url)) + createJob(svcFolder, tfDestroyJobName, createJobXml('tf-destroy.groovy', git_url)) + } + } + } +} +} diff --git a/jenkins_install/init/02_jenkins-view.groovy b/jenkins_install/init/02_jenkins-view.groovy index b7976c30b..edec49a67 100644 --- a/jenkins_install/init/02_jenkins-view.groovy +++ b/jenkins_install/init/02_jenkins-view.groovy @@ -1,55 +1,54 @@ - import jenkins.model.Jenkins - - def parentPath = "terraform_files" - def jenkinsInstance = Jenkins.getInstance() - - def findRegionFolders(jenkinsInstance, parentPath) { - def parent = jenkinsInstance.getItemByFullName(parentPath) - - if (parent != null && parent instanceof hudson.model.ViewGroup) { - return parent.items.findAll { it instanceof hudson.model.ViewGroup } - } else { - println("Parent folder not found: $parentPath") - return [] - } - } - - def addJobsToView(view, folder) { - folder.items.each { item -> - if (item instanceof hudson.model.Job) { - // println("Processing job: ${item.fullName}") - view.add(item) - } else if (item instanceof hudson.model.ViewGroup) { - // Recursively add jobs from subfolders - addJobsToView(view, item) - } - } - } - - def processRegionFolder(jenkinsInstance, regionFolder) { - def viewName = "${regionFolder.name}" - def view = jenkinsInstance.getView(viewName) - - if (view == null) { - // Create the view if it doesn't exist - view = new hudson.model.ListView(viewName, jenkinsInstance) - jenkinsInstance.addView(view) - } - - addJobsToView(view, regionFolder) - - // Set the "Recurse in folders" option - view.setRecurse(true) - - // Save the view configuration - view.save() - - println("View '$viewName' created successfully.") - } - - def regionFolders = findRegionFolders(jenkinsInstance, parentPath) - regionFolders.each { regionFolder -> - processRegionFolder(jenkinsInstance, regionFolder) - } - - println("Processing completed for all region folders.") +import jenkins.model.Jenkins + +def createRegionViews() { + def jenkinsInstance = Jenkins.getInstance() + if (jenkinsInstance == null) { + println("Jenkins instance not available.") + return + } + + def parentPath = 
"terraform_files" + def parent = jenkinsInstance.getItemByFullName(parentPath) + + if (parent != null && parent instanceof hudson.model.ViewGroup) { + parent.items.each { regionFolder -> + def viewName = regionFolder.name + def view = jenkinsInstance.getView(viewName) + + if (view == null) { + view = new hudson.model.ListView(viewName, jenkinsInstance) + jenkinsInstance.addView(view) + } + + // Clear the view to remove any existing jobs + view.items.clear() + + // Add jobs to the view + addJobsToView(view, regionFolder) + + // Set the "Recurse in folders" option + view.setRecurse(true) + + // Save the view configuration + view.save() + + println("View '$viewName' created successfully.") + } + } else { + println("Parent folder not found: $parentPath") + } +} + +def addJobsToView(hudson.model.ListView view, hudson.model.ViewGroup folder) { + folder.items.each { item -> + if (item instanceof hudson.model.Job) { + view.add(item) + } else if (item instanceof hudson.model.ViewGroup) { + // Recursively add jobs from sub-folders + addJobsToView(view, item) + } + } +} + +// function to create region views +createRegionViews() \ No newline at end of file diff --git a/jenkins_install/jcasc.yaml b/jenkins_install/jcasc.yaml index e81028cda..a7f8290dc 100644 --- a/jenkins_install/jcasc.yaml +++ b/jenkins_install/jcasc.yaml @@ -42,6 +42,11 @@ security: usageStatisticsEnabled: true globalJobDslSecurityConfiguration: useScriptSecurity: false + scriptApproval: + approvedSignatures: + - "method groovy.lang.GroovyObject invokeMethod java.lang.String java.lang.Object" + - "new java.io.File java.lang.String" + - "staticMethod org.codehaus.groovy.runtime.ResourceGroovyMethods readLines java.io.File" unclassified: buildDiscarders: configuredBuildDiscarders: diff --git a/jenkins_install/jenkins.sh b/jenkins_install/jenkins.sh index 13c726857..93b4c79fb 100644 --- a/jenkins_install/jenkins.sh +++ b/jenkins_install/jenkins.sh @@ -5,8 +5,10 @@ # Check if JENKINS_HOME exists if [ ! 
-d "$JENKINS_HOME" ]; then # If it doesn't exist, create it - mkdir -p "$JENKINS_HOME" - echo "Directory created: $JENKINS_HOME" + #mkdir -p "$JENKINS_HOME" + #echo "Directory created: $JENKINS_HOME" + echo "Jenkins should be configured only if Devops parameter is set during tenancy configuration for the toolkit" + exit fi # Copy Required files to JENKINS_HOME diff --git a/jenkins_install/multiOutput-tf-destroy.groovy b/jenkins_install/multiOutput-tf-destroy.groovy deleted file mode 100644 index a69ebb04e..000000000 --- a/jenkins_install/multiOutput-tf-destroy.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* Set the various stages of the build */ -def tf_plan = "Changes" -pipeline { - agent any - options { - ansiColor('xterm') - } - stages { - stage('Terraform Destroy Plan') { - when { - expression { return env.GIT_BRANCH == 'origin/develop';} - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - def jobName = env.JOB_NAME - def parts = jobName.split('/') - - // Assuming the job name format is /job//job/job_name - def regionName = parts[1] - def serviceName = parts[2] - - // Set environment variables for reuse in subsequent stages - env.Region = regionName - env.Service = serviceName - - sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform init -upgrade" - //sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform plan -destroy" - - - // Run Terraform plan - def terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform plan -destroy", returnStdout: true).trim() - - // Check if the plan contains any changes - if (terraformPlanOutput.contains('No changes.')) { - echo 'No changes in Terraform plan. Skipping further stages.' - tf_plan = "No Changes" - } else { - // If there are changes, proceed with applying the plan - echo "Proceeding with destroy. \n${terraformPlanOutput}" - - } - - } - } - } - } - - /** Approval for Terraform Apply **/ - stage('Get Approval') { - when { - allOf{ - expression {return env.GIT_BRANCH == 'origin/develop'; } - expression {return tf_plan == "Changes" } - expression {return currentBuild.result != "FAILURE" } - } - } - input { - message "Do you want to perform terraform destroy?" - - } - steps { - echo "Approval for the Destroy Granted!" - } - } - - stage('Terraform Destroy') { - when { - allOf{ - expression {return env.GIT_BRANCH == 'origin/develop'; } - expression {return tf_plan == "Changes" } - expression {return currentBuild.result != "FAILURE" } - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform destroy --auto-approve" - } - } - } - } - - - /** Main branch commit to keep changes in Sync **/ - stage('Commit To Main') { - when { - allOf { - expression { return env.GIT_BRANCH == 'origin/develop'; } - expression { return tf_plan == "Changes" } - expression { return currentBuild.result != "FAILURE" } - } - } - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - def buildDir = "${WORKSPACE}/${BUILD_NUMBER}" - // Create directory with build number - sh "mkdir -p ${buildDir}" - - // Commit changes to the main branch - dir(buildDir) { - sh """ - git clone ${GIT_URL} - cd \$(ls -d */|head -n 1) - git checkout main - ls -lrtha - cd "${env.Region}/${env.Service}" - git pull --no-edit origin main - rm -f *.tfvars - git rm *.tfvars - git status - git add --all . 
- """ - - def git_status = false - while (!git_status) { - // Execute the git commands using shell - def gitResult = sh(script: """ - cd "\$(ls -d */|head -n 1)" - cd "${env.Region}/${env.Service}" - git fetch origin main - git merge origin/main - git commit -m "commit for terraform-destroy build - ${BUILD_NUMBER} for "${env.Region}"/"${env.Service} - - git push --porcelain origin main - """, returnStatus: true) - - if (gitResult == 0) { - git_status = true - } else { - echo "Git operation failed, retrying...." - sleep 3 // 3 seconds before retrying - } - } - - - } - } - } - } - - post { - always { - // Delete the build directory and the temporary directory - deleteDir() - } - } - } - - } -} diff --git a/jenkins_install/multiOutput.groovy b/jenkins_install/multiOutput.groovy deleted file mode 100644 index 6695f59ca..000000000 --- a/jenkins_install/multiOutput.groovy +++ /dev/null @@ -1,198 +0,0 @@ -def tf_plan = "Changes" -pipeline { - agent any - options { - ansiColor('xterm') - } - stages { - stage('Terraform Plan') { - when { - expression { - return env.GIT_BRANCH == 'origin/develop'; - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - def jobName = env.JOB_NAME - def parts = jobName.split('/') - - // Assuming the job name format is /job//job/job_name - def regionName = parts[1] - def serviceName = parts[2] - - - // Set environment variables for reuse in subsequent stages - env.Region = regionName - env.Service = serviceName - - //dir("${WORKSPACE}/${env.Region}/${env.Service}") { - // sh 'terraform init -upgrade' - //} - sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform init -upgrade" - - // Run Terraform plan and capture the output - def terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform plan -out=tfplan.out", returnStdout: true).trim() - - // Check if the plan contains any changes - if (terraformPlanOutput.contains('No changes.')) { - echo 'No changes in Terraform plan. Skipping further stages.' - tf_plan = "No Changes" - } else { - // If there are changes, proceed with applying the plan - echo "Changes detected in Terraform plan. Proceeding with apply. \n${terraformPlanOutput}" - - } - } - } - } - } - - /** OPA Stage **/ - stage('OPA') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop'} - expression { return tf_plan == "Changes" } - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - // Run Terraform show and capture the output - sh "set +x && cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform show -json tfplan.out > tfplan.json" - // Run OPA eval - def opaOutput = sh(script: "opa eval -f pretty -b /cd3user/oci_tools/cd3_automation_toolkit/user-scripts/OPA/ -i \"${WORKSPACE}/${env.Region}/${env.Service}/tfplan.json\" data.terraform.deny",returnStdout: true).trim() - - if (opaOutput == '[]') { - echo "No OPA rules are violated. Proceeding with the next stage." 
- } - else { - echo "OPA Output:\n${opaOutput}" - unstable(message:"OPA Rules are violated.") - } - } - } - } - } - - stage('Get Approval') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop'} - expression {return tf_plan == "Changes"} - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - - options { - timeout(time: 1440, unit: 'MINUTES') // 24 hours timeout - } - - steps { - script { - input message: "Do you want to apply the plan?" - echo "Approval for the Apply Granted!" - } - } - } - stage('Terraform Apply') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop'} - expression {return tf_plan == "Changes"} - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform apply --auto-approve tfplan.out" - } - } - } - } - stage('Git Commit to main') { - when { - allOf{ - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - try { - sh ''' - set +x - mkdir -p ${WORKSPACE}/../${BUILD_NUMBER} - cd ${WORKSPACE}/../${BUILD_NUMBER} - git clone ${GIT_URL} - repo_name=${GIT_URL##*/} - cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} - git checkout main - reg=`echo ${JOB_NAME}| cut -d "/" -f2` - service=`echo ${JOB_NAME}| cut -d "/" -f3` - copy_path=${reg}/${service} - cp -r ${WORKSPACE}/${copy_path}/* ${copy_path}/ - git add ${copy_path}* - ''' - } catch(Exception e1) { - println(e1) - sh ''' - set +x - rm -rf ${WORKSPACE}/../${BUILD_NUMBER} - exit 1 - ''' - - } - sh ''' - set +x - repo_name=${GIT_URL##*/} - reg=`echo ${JOB_NAME}| cut -d "/" -f2` - service=`echo ${JOB_NAME}| cut -d "/" -f3` - cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} - git_status=`git status --porcelain` - if [[ $git_status ]];then - git commit -m "commit for terraform-apply build - ${BUILD_NUMBER} for "${reg}"/"${service} - else - echo "Nothing to commit" - fi - ''' - status = sh(script: ''' - set +x - repo_name=${GIT_URL##*/} - cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} - git pull --no-edit origin main - git push --porcelain origin main - ''', returnStatus: true) - - while (status != 0){ - println("Trying again ...") - status = sh(script: ''' - set +x - repo_name=${GIT_URL##*/} - cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} - git pull --no-edit origin main - set -x - git push --porcelain origin main - ''', returnStatus: true) - } - sh ''' - set +x - rm -rf ${WORKSPACE}/../${BUILD_NUMBER} - ''' - - } - } - } - } - } -} diff --git a/jenkins_install/plugins.txt b/jenkins_install/plugins.txt index 0a86d1e07..0f5e258a8 100644 --- a/jenkins_install/plugins.txt +++ b/jenkins_install/plugins.txt @@ -1,23 +1,22 @@ plain-credentials:143.v1b_df8b_d3b_e48 snakeyaml-api:2.2-111.vc6598e30cc65 -workflow-step-api:639.v6eca_cd8c04a_a_ -pipeline-build-step:516.v8ee60a_81c5b_9 -pipeline-input-step:477.v339683a_8d55e +workflow-step-api:657.v03b_e8115821b_ +pipeline-build-step:540.vb_e8849e1a_b_d8 +pipeline-input-step:491.vb_07d21da_1a_fb_ pipeline-stage-view:2.34 job-dsl:1.87 -credentials-binding:642.v737c34dea_6c2 +credentials-binding:657.v2b_19db_7d6e6d docker-workflow:572.v950f58993843 -scm-api:676.v886669a_199a_a_ -configuration-as-code:1700.v6f448841296e 
-config-file-provider:959.vcff671a_4518b_ +scm-api:689.v237b_6d3a_ef7f +configuration-as-code:1775.v810dc950b_514 +config-file-provider:968.ve1ca_eb_913f8c git:5.2.1 -credentials:1309.v8835d63eb_d8a_ -build-timeout:1.31 -script-security:1281.v22fb_899df1a_e +credentials:1319.v7eb_51b_3a_c97b_ +build-timeout:1.32 +script-security:1326.vdb_c154de8669 rebuild:330.v645b_7df10e2a_ uno-choice:2.8.1 file-parameters:316.va_83a_1221db_a_7 -scriptler:334.v29792d5a_c058 -ansicolor:0.6.2 -pipeline-graph-view:205.vb_8e3a_b_51f12e - +scriptler:348.v5d461e205da_a_ +ansicolor:1.0.4 +pipeline-graph-view:223.vf9249decdfcd diff --git a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy index 3ae60f0bc..db01fc1a8 100644 --- a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy +++ b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy @@ -1,25 +1,33 @@ -html_to_be_rendered = "" +def reg_list = new File("/cd3user/tenancies/${customer_prefix}/.config_files/regions_file") as String[] +def string_list = reg_list.join(", ") +reg_options = "" +for(item in string_list.split(",")){ + reg_options = reg_options+"" +} +def comp_list = new File("/cd3user/tenancies/${customer_prefix}/.config_files/compartments_file") as String[] +def string_list2 = comp_list.join(", ") +comp_options = "" +for(item in string_list2.split(",")){ + comp_options = comp_options+"" +} +html_to_be_rendered = "
" if(Workflow.toLowerCase().contains("export")){ html_to_be_rendered = """ ${html_to_be_rendered} - - - - - - - - - - - - - - - - + + + + + + + + + + + + """ } @@ -27,99 +35,79 @@ for (item in SubOptions.split(",")) { if (item.equals("Export Instances (excludes instances launched by OKE)")) { html_to_be_rendered = """ ${html_to_be_rendered} - - - + + - - - - + + + + - - - - - - + + """ } if (item.equals("Export Firewall Policy")) { html_to_be_rendered = """ ${html_to_be_rendered} - - - + + - - - + + + """ } if (item.equals("Clone Firewall Policy")) { html_to_be_rendered = """ ${html_to_be_rendered} - - - - - - - - - - + + + + + + + + + - - - - - - - - - - - - - + + + + + + + + + + + + """ } if (item.equals("Delete Firewall Policy")) { html_to_be_rendered = """ ${html_to_be_rendered} - - - - - - - - - - + + + + + + + + + - - - - + + """ } if (item.equals("Export Block Volumes/Block Backup Policy")) { @@ -131,13 +119,12 @@ for (item in SubOptions.split(",")) { - + - - - + + """ } @@ -150,10 +137,9 @@ for (item in SubOptions.split(",")) { - + """ } if (item.equals('Upload current terraform files/state to Resource Manager')){ @@ -161,74 +147,64 @@ for (item in SubOptions.split(",")) { ${html_to_be_rendered} - - - - - - - + + + + + - - - - - + + + + + """ } if (item.equals('Create Key/Vault')){ html_to_be_rendered = """ ${html_to_be_rendered} - - - - - - - + + + + + - - - - - + + + + + """ } - if (item.equals('Create Default Budget')){ html_to_be_rendered = """ ${html_to_be_rendered} - - - + - + """ } if (item.equals('Enable Cloud Guard')){ html_to_be_rendered = """ ${html_to_be_rendered} - - - - - - - - - + + + + + + """ } @@ -241,20 +217,16 @@ for (item in SubChildOptions.split(",")) { ${html_to_be_rendered} - - - - - - + + + + + """ - } break } - -html_to_be_rendered = "${html_to_be_rendered}

(Leave empty for all subscribed regions)

(Leave empty to fetch from all compartments)
-
-

-
-
(eg AD1,AD2,AD3)
(eg AD1,AD2,AD3)
-
-





(Leave empty if you need tool to generate the policy names)
- - -
(Leave empty if you need tool to generate the policy names)






(eg AD1,AD2,AD3)
(eg AD1,AD2,AD3)
-

(Leave empty for all subscribed regions)









(Leave empty to fetch from all compartments)
" - -return html_to_be_rendered \ No newline at end of file +html_to_be_rendered = "${html_to_be_rendered} " +return html_to_be_rendered diff --git a/jenkins_install/scriptler/scripts/MainOptions.groovy b/jenkins_install/scriptler/scripts/MainOptions.groovy index e3fe882fe..0abb16e34 100644 --- a/jenkins_install/scriptler/scripts/MainOptions.groovy +++ b/jenkins_install/scriptler/scripts/MainOptions.groovy @@ -15,7 +15,8 @@ return[ "Logging Services", "Software-Defined Data Centers - OCVS", "CIS Compliance Features", -"CD3 Services" +"CD3 Services", +"3rd Party Services" ] } else if(Workflow.toLowerCase().contains("export")) { diff --git a/jenkins_install/scriptler/scripts/SubChildOptions.groovy b/jenkins_install/scriptler/scripts/SubChildOptions.groovy index 8c5fd0377..d86bb2d04 100644 --- a/jenkins_install/scriptler/scripts/SubChildOptions.groovy +++ b/jenkins_install/scriptler/scripts/SubChildOptions.groovy @@ -1,9 +1,10 @@ List sec_rules = ["SECURITY RULES:disabled","Export Security Rules (From OCI into SecRulesinOCI sheet)", "Add/Modify/Delete Security Rules (Reads SecRulesinOCI sheet)"] List route_rules = ["ROUTE RULES:disabled","Export Route Rules (From OCI into RouteRulesinOCI sheet)", "Add/Modify/Delete Route Rules (Reads RouteRulesinOCI sheet)"] -List firewall_policy = ["FIREWALL POLICY:disabled","Add/Modify/Delete Policy", "Add/Modify/Delete Service","Add/Modify/Delete Service-list","Add/Modify/Delete Application","Add/Modify/Delete Application-list","Add/Modify/Delete Address-list","Add/Modify/Delete Url-list","Add/Modify/Delete Security rules","Add/Modify/Delete Mapped Secrets","Add/Modify/Delete Decryption Rules","Add/Modify/Delete Decryption Profile"] +List firewall_policy = ["FIREWALL POLICY:disabled","Add/Modify Policy", "Add/Modify Service","Add/Modify Service-list","Add/Modify Application","Add/Modify Application-list","Add/Modify Address-list","Add/Modify Url-list","Add/Modify Security rules","Add/Modify Mapped Secrets","Add/Modify Decryption Rules","Add/Modify Decryption Profile"] List drg_route_rules = ["DRG ROUTE RULES:disabled","Export DRG Route Rules (From OCI into DRGRouteRulesinOCI sheet)", "Add/Modify/Delete DRG Route Rules (Reads DRGRouteRulesinOCI sheet)"] List nsg = ["NSGs:disabled","Export NSGs (From OCI into NSGs sheet)", "Add/Modify/Delete NSGs (Reads NSGs sheet)"] List cis = ["CIS:disabled","Download latest compliance checking script", "Execute compliance checking script"] +List showoci = ["SHOW OCI:disabled","Download Latest ShowOCI Script", "Execute ShowOCI Script"] List customer_connectivity = ["Connectivity:disabled","Create Remote Peering Connections"] List final_list = [] @@ -20,8 +21,11 @@ for (item in SubOptions.split(",")) { if (item.equals("Network Security Groups")){ final_list += nsg } - if (item.equals("CIS Compliance Checking Script")){ + if (item.equals("CIS Compliance Check Script")){ final_list += cis + } + if (item.equals("ShowOCI Report")){ + final_list += showoci } if (item.equals("Add/Modify/Delete Firewall Policy")){ final_list += firewall_policy diff --git a/jenkins_install/scriptler/scripts/SubOptions.groovy b/jenkins_install/scriptler/scripts/SubOptions.groovy index 911185cc7..b7ca9ea99 100644 --- a/jenkins_install/scriptler/scripts/SubOptions.groovy +++ b/jenkins_install/scriptler/scripts/SubOptions.groovy @@ -10,9 +10,9 @@ List load_balancers = ["LOAD BALANCERS:disabled","Add/Modify/Delete Load List management_services = ["MANAGEMENT SERVICES:disabled","Add/Modify/Delete Notifications", "Add/Modify/Delete Events", 
"Add/Modify/Delete Alarms", "Add/Modify/Delete ServiceConnectors"] List developer_services = ["DEVELOPER SERVICES:disabled","Add/Modify/Delete OKE Cluster and Nodepools"] List logging_services = ["LOGGING SERVICES:disabled","Enable VCN Flow Logs", "Enable LBaaS Logs", "Enable Object Storage Buckets Write Logs"] -List cis = ["CIS:disabled","CIS Compliance Checking Script", "Create Key/Vault", "Create Default Budget", "Enable Cloud Guard"] +List cis = ["CIS:disabled","Create Key/Vault", "Create Default Budget", "Enable Cloud Guard"] List cd3_services = ["CD3 SERVICES:disabled","Fetch Compartments OCIDs to variables file", "Fetch Protocols to OCI_Protocols"] - +List utility_services = ["3rd Party Services:disabled","CIS Compliance Check Script", "ShowOCI Report"] List ex_identity = ["IDENTITY:disabled","Export Compartments/Groups/Policies", "Export Users", "Export Network Sources"] List ex_network = ["NETWORK:disabled","Export all Network Components", "Export Network components for VCNs/DRGs/DRGRouteRulesinOCI Tabs", "Export Network components for DHCP Tab", "Export Network components for SecRulesinOCI Tab", "Export Network components for RouteRulesinOCI Tab", "Export Network components for SubnetsVLANs Tab", "Export Network components for NSGs Tab"] List ex_firewall = ["OCI FIREWALL:disabled","Export Firewall Policy", "Export Firewall"] @@ -68,6 +68,9 @@ final_list += cis if (item.equals("CD3 Services")){ final_list += cd3_services } +if (item.equals("3rd Party Services")){ +final_list += utility_services +} if (item.equals("Export Identity")){ final_list += ex_identity } diff --git a/jenkins_install/scriptler/scripts/ValidateParams.groovy b/jenkins_install/scriptler/scripts/ValidateParams.groovy index 562deb0f6..196416905 100644 --- a/jenkins_install/scriptler/scripts/ValidateParams.groovy +++ b/jenkins_install/scriptler/scripts/ValidateParams.groovy @@ -13,8 +13,9 @@ def validate_params(Workflow,MainOptions,SubOptions,SubChildOptions,AdditionalFi "Management Services":["Add/Modify/Delete Notifications", "Add/Modify/Delete Events", "Add/Modify/Delete Alarms", "Add/Modify/Delete ServiceConnectors"], "Developer Services":["Upload current terraform files/state to Resource Manager", "Add/Modify/Delete OKE Cluster and Nodepools"], "Logging Services":["Enable VCN Flow Logs", "Enable LBaaS Logs", "Enable Object Storage Buckets Write Logs"], - "CIS Compliance Features":["CIS Compliance Checking Script", "Create Key/Vault", "Create Default Budget", "Enable Cloud Guard"], - "CD3 Services":["Fetch Compartments OCIDs to variables file", "Fetch Protocols to OCI_Protocols"] + "CIS Compliance Features":["Create Key/Vault", "Create Default Budget", "Enable Cloud Guard"], + "CD3 Services":["Fetch Compartments OCIDs to variables file", "Fetch Protocols to OCI_Protocols"], + "3rd Party Services":["CIS Compliance Check Script", "ShowOCI Report"] ] def non_gf_options_map = [ "Export Identity":["Export Compartments/Groups/Policies", "Export Users", "Export Network Sources"], diff --git a/jenkins_install/setUpOCI_config.xml b/jenkins_install/setUpOCI_config.xml index a5cb450cc..b6435444f 100644 --- a/jenkins_install/setUpOCI_config.xml +++ b/jenkins_install/setUpOCI_config.xml @@ -304,8 +304,9 @@ pipeline { set +x if [[ -n "$Excel_Template_FILENAME" ]];then size=$(wc --bytes < "$Excel_Template") - if [[ ( $size -gt 300000 ) || ( $Excel_Template_FILENAME != *.xlsx ) ]]; then - echo "Failed" + if [[ ( $size -gt 1000000 ) || ( $Excel_Template_FILENAME != *.xlsx ) ]]; then + set -x + echo "Excel File Validation 
Failed" fi fi ''' , returnStdout: true).trim() @@ -353,7 +354,7 @@ pipeline { if [ -e "$cd3_file" ]; then cp "$cd3_file" "$cd3_backup" fi - mv "$Excel_Template" "$cd3_file" + cp "$Excel_Template" "$cd3_file" sed -i "s|cd3file=.*|cd3file=${cd3_file}|g" $prop_file fi ''' @@ -392,6 +393,24 @@ pipeline { python setUpOCI.py --devops True --main_options "${MainOptions}" --sub_options "${SubOptions}" --sub_child_options "${SubChildOptions}" --add_filter "${AdditionalFilters}" $prop_file ''' + script { + // For latest CD3 XL file. + def latestXL = sh(returnStdout: true, script: ''' + set +x + ls -t /cd3user/tenancies/${customer_prefix}/*.xl* | head -n 1 + ''').trim() + + + echo "XL is ${latestXL}" + sh "rm -f *.xl*" + sh "cp '${latestXL}' ." + + } + } + } + post { + success { + archiveArtifacts '*.xl*' } } } @@ -408,7 +427,7 @@ pipeline { steps { catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { script { - def data = readFile(file: "/cd3user/tenancies/${customer_prefix}/terraform_files/import_scripts.safe") + def data = readFile(file: "/cd3user/tenancies/${customer_prefix}/terraform_files/.safe/import_scripts.safe") def lines = data.readLines() for (line in lines) { script_full_path = (line.replace('//','/')).split("/") @@ -464,7 +483,7 @@ pipeline { catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { script { def jobs = [] - def data = readFile(file: "/cd3user/tenancies/${customer_prefix}/terraform_files/updated_paths.safe") + def data = readFile(file: "/cd3user/tenancies/${customer_prefix}/terraform_files/.safe/updated_paths.safe") def lines = data.readLines() if (lines.size() == 0) { println("No terraform configuration file generated") @@ -484,6 +503,7 @@ pipeline { } } } + } true diff --git a/jenkins_install/singleOutput-tf-destroy.groovy b/jenkins_install/singleOutput-tf-destroy.groovy deleted file mode 100644 index 53ae54627..000000000 --- a/jenkins_install/singleOutput-tf-destroy.groovy +++ /dev/null @@ -1,149 +0,0 @@ -/* Set the various stages of the build */ -def tf_plan = "Changes" -pipeline { - agent any - options { - ansiColor('xterm') - } - environment { - CI = 'true' - } - stages { - stage('Terraform Destroy Plan') { - when { - expression { return env.GIT_BRANCH == 'origin/develop'; } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - def jobName = env.JOB_NAME - def parts = jobName.split('/') - - // Assuming job name format is /job/job_name - def regionName = parts[1] - // Set environment variables for reuse - env.Region = regionName - - sh "cd \"${WORKSPACE}/${env.Region}\" && terraform init -upgrade" - - // Run Terraform plan - def terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}\" && terraform plan -destroy", returnStdout: true).trim() - - // Check if the plan contains any changes - if (terraformPlanOutput.contains('No changes.')) { - echo 'No changes in Terraform plan. Skipping further stages.' - tf_plan = "No Changes" - } else { - // If there are changes, proceed with applying the plan - echo "Proceeding with apply. \n${terraformPlanOutput}" - - } - } - } - } - } - - stage('Get Approval') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop';} - expression { return tf_plan == "Changes" } - expression {return currentBuild.result != "FAILURE" } - } - } - input { - message "Do you want to perform terraform destroy?" - } - steps { - echo "Approval for the Terraform Destroy Granted!" 
- } - } - - stage('Terraform Destroy') { - when { - allOf{ - expression {return env.GIT_BRANCH == 'origin/develop';} - expression { return tf_plan == "Changes" } - expression {return currentBuild.result != "FAILURE" } - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - sh "cd \"${WORKSPACE}/${env.Region}\" && terraform destroy --auto-approve" - } - } - } - } - - - /** Main branch commit to keep changes in Sync **/ - stage('Commit To Main') { - when { - allOf { - expression { return env.GIT_BRANCH == 'origin/develop'; } - expression { return tf_plan == "Changes" } - expression { return currentBuild.result != "FAILURE" } - } - } - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - def buildDir = "${WORKSPACE}/${BUILD_NUMBER}" - // Create a directory with the build number - sh "mkdir -p ${buildDir}" - - // Commit the changes to the main branch - dir(buildDir) { - sh """ - git clone ${GIT_URL} - cd \$(ls -d */|head -n 1) - git checkout main - cd "${env.Region}" - git pull --no-edit origin main - rm -f *.tfvars - git rm *.tfvars - git status - git add --all . - """ - - def git_status = false - while (!git_status) { - // Execute the git commands using shell - def gitResult = sh(script: """ - cd "\$(ls -d */|head -n 1)" - cd "${env.Region}" - ls -lrtha - git fetch origin main - git merge origin/main - git commit -m "commit for terraform-destroy build - ${BUILD_NUMBER} for "${env.Region} - - git push --porcelain origin main - """, returnStatus: true) - - if (gitResult == 0) { - git_status = true - } else { - echo "Git operation failed, retrying...." - sleep 3 // 3 seconds before retrying - } - } - - } - } - } - } - - post { - always { - // Delete the build directory and the temporary directory - deleteDir() - } - } - } - - - } -} diff --git a/jenkins_install/singleOutput.groovy b/jenkins_install/singleOutput.groovy deleted file mode 100644 index d68e7998a..000000000 --- a/jenkins_install/singleOutput.groovy +++ /dev/null @@ -1,165 +0,0 @@ -/* Set the various stages of the build */ -def tf_plan = "Changes" -pipeline { - agent any - options { - ansiColor('xterm') - } - stages { - stage('Terraform Plan') { - when { - expression { - return env.GIT_BRANCH == 'origin/develop'; - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - def jobName = env.JOB_NAME - def parts = jobName.split('/') - - // Assuming the job name format is /job/job_name - def regionName = parts[1] - - // Set environment variables for reuse in subsequent stages - env.Region = regionName - sh "cd \"${WORKSPACE}/${env.Region}\" && terraform init -upgrade" - - // Run Terraform plan and capture the output - def terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}\" && terraform plan -out=tfplan.out", returnStdout: true).trim() - - // Check if the plan contains any changes - if (terraformPlanOutput.contains('No changes.')) { - echo 'No changes in Terraform plan. Skipping further stages.' - tf_plan = "No Changes" - } else { - // If there are changes, proceed with applying the plan - echo "Changes detected in Terraform plan. Proceeding with apply. 
\n${terraformPlanOutput}" - - } - } - } - } - } - /** OPA Stage **/ - stage('OPA') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop'} - expression { return tf_plan == "Changes" } - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - // Run Terraform show and capture the output - sh "set +x && cd \"${WORKSPACE}/${env.Region}\" && terraform show -json tfplan.out > tfplan.json" - // Run OPA eval - def opaOutput = sh(script: "opa eval -f pretty -b /cd3user/oci_tools/cd3_automation_toolkit/user-scripts/OPA/ -i \"${WORKSPACE}/${env.Region}/tfplan.json\" data.terraform.deny",returnStdout: true).trim() - - if (opaOutput == '[]') { - echo "No OPA rules are violated. Proceeding with the next stage." - } - else { - echo "OPA Output:\n${opaOutput}" - unstable(message:"OPA Rules are violated.") - } - } - } - } - } - - stage('Get Approval') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop'} - expression {return tf_plan == "Changes"} - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - - options { - timeout(time: 1440, unit: 'MINUTES') // 24hours - } - - steps { - script { - input message: "Do you want to apply the plan?" - echo "Approval for the Apply Granted!" - } - } - } - - stage('Terraform Apply') { - when { - allOf{ - expression { return env.GIT_BRANCH == 'origin/develop'} - expression {return tf_plan == "Changes"} - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - sh "cd \"${WORKSPACE}/${env.Region}\" && terraform apply --auto-approve tfplan.out" - } - } - } - } - stage('Git commit to main') { - when { - allOf{ - expression {return currentBuild.result != "ABORTED" } - expression {return currentBuild.result != "FAILURE" } - } - } - steps { - catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { - script { - try { - sh ''' - mkdir -p ${WORKSPACE}/../${BUILD_NUMBER} - cd ${WORKSPACE}/../${BUILD_NUMBER} - git clone ${GIT_URL} - repo_name=${GIT_URL##*/} - cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} - git checkout main - reg=`echo ${JOB_NAME}| cut -d "/" -f2` - copy_path=${reg} - cp -r ${WORKSPACE}/${copy_path}/* ${copy_path}/ - git add ${copy_path}* - git_status=`git status --porcelain` - if [[ $git_status ]];then - git commit -m "commit for terraform-apply build - ${BUILD_NUMBER} for "${reg} - git push origin main - else - echo "Nothing to commit" - fi - cd ${WORKSPACE}/.. - rm -rf ${WORKSPACE}/../${BUILD_NUMBER} - ''' - - } catch(Exception e1) { - println(e1) - sh ''' - cd ${WORKSPACE}/.. 
- rm -rf ${WORKSPACE}/../${BUILD_NUMBER} - exit 1 - ''' - - } - - } - } - } - } - } -} - diff --git a/jenkins_install/tf-apply.groovy b/jenkins_install/tf-apply.groovy new file mode 100644 index 000000000..e180317bb --- /dev/null +++ b/jenkins_install/tf-apply.groovy @@ -0,0 +1,257 @@ +def tf_plan = "Changes" +pipeline { + agent any + options { + ansiColor('xterm') + } + stages { + stage('Set Environment Variables') { + steps { + script { + def fileContent = readFile "${JENKINS_HOME}/jenkins.properties" + // Split file content into lines + def lines = fileContent.readLines() + // Process each line to extract variable name and value + def variables = [:] + lines.each { line -> + def parts = line.split('=') + if (parts.size() == 2) { + variables[parts[0].trim()] = parts[1].trim() + } + } + def variableOds = variables['outdir_structure'].toString().replaceAll("\\[|\\]", '').replaceAll('"', '') + env.out_str = "${variableOds}" + def jobName = env.JOB_NAME + def parts = jobName.split('/') + if (env.out_str == 'Multiple_Outdir') { + // Assuming the job name format is /job//job/job_name + env.Region = parts[1] + env.Service = parts[2] + } + else { + // Assuming the job name format is /job/job_name + env.Region = parts[1] + env.Service = '' + if (env.Region == 'global') { + env.Service = 'rpc' + } + } + } + } + } + + stage('Terraform Plan') { + when { + expression { + return env.GIT_BRANCH == 'origin/develop'; + } + } + + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform init -upgrade" + // Run Terraform plan and capture the output + terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform plan -out=tfplan.out", returnStdout: true).trim() + // Check if the plan contains any changes + if (terraformPlanOutput.contains('No changes.')) { + echo 'No changes in Terraform plan. Skipping further stages.' + tf_plan = "No Changes" + } else { + // If there are changes, proceed with applying the plan + echo "Changes detected in Terraform plan. Proceeding with apply. \n${terraformPlanOutput}" + } + } + } + } + } + + // OPA Stage + stage('OPA') { + when { + allOf{ + expression { return env.GIT_BRANCH == 'origin/develop' } + expression { return tf_plan == "Changes" } + expression { return currentBuild.result != "ABORTED" } + expression { return currentBuild.result != "FAILURE" } + } + } + + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + // Run Terraform show and capture the output + sh "set +x && cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform show -json tfplan.out > tfplan.json" + // Run OPA eval + opaOutput = sh(script: "opa eval -f pretty -b /cd3user/oci_tools/cd3_automation_toolkit/user-scripts/OPA/ -i \"${WORKSPACE}/${env.Region}/${env.Service}/tfplan.json\" data.terraform.deny", returnStdout: true).trim() + if (opaOutput == '[]') { + echo "No OPA rules are violated. Proceeding with the next stage." 
+ } else { + echo "OPA Output:\n${opaOutput}" + unstable(message: "OPA Rules are violated.") + } + } + } + } + } + + // Get Approval + stage('Get Approval') { + when { + allOf{ + expression { return env.GIT_BRANCH == 'origin/develop' } + expression { return tf_plan == "Changes" } + expression { return currentBuild.result != "ABORTED" } + expression { return currentBuild.result != "FAILURE" } + } + } + + options { + timeout(time: 1440, unit: 'MINUTES') // 24 hours timeout + } + + steps { + script { + input message: "Do you want to apply the plan?" + echo "Approval for the Apply Granted!" + } + } + } + + // Terraform Apply + stage('Terraform Apply') { + when { + allOf{ + expression { return env.GIT_BRANCH == 'origin/develop' } + expression { return tf_plan == "Changes" } + expression { return currentBuild.result != "ABORTED" } + expression { return currentBuild.result != "FAILURE" } + } + } + + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform apply --auto-approve tfplan.out" + + } + } + } + } + + // Git Commit to main + stage('Git Commit to main') { + when { + allOf{ + expression { return currentBuild.result != "ABORTED" } + expression { return currentBuild.result != "FAILURE" } + } + } + + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + if (env.out_str == 'Multiple_Outdir') { + try { + sh ''' + set +x + mkdir -p ${WORKSPACE}/../${BUILD_NUMBER} + cd ${WORKSPACE}/../${BUILD_NUMBER} + git clone ${GIT_URL} + repo_name=${GIT_URL##*/} + cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} + git checkout main + reg=`echo ${JOB_NAME}| cut -d "/" -f2` + service=`echo ${JOB_NAME}| cut -d "/" -f3` + copy_path=${reg}/${service} + cp -r ${WORKSPACE}/${copy_path}/* ${copy_path}/ + git add ${copy_path}* + ''' + } catch(Exception e1) { + println(e1) + sh ''' + set +x + rm -rf ${WORKSPACE}/../${BUILD_NUMBER} + exit 1 + ''' + } + sh ''' + set +x + repo_name=${GIT_URL##*/} + reg=`echo ${JOB_NAME}| cut -d "/" -f2` + service=`echo ${JOB_NAME}| cut -d "/" -f3` + cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} + git_status=`git status --porcelain` + if [[ $git_status ]]; then + git commit -m "commit for terraform-apply build - ${BUILD_NUMBER} for "${reg}"/"${service} + else + echo "Nothing to commit" + fi + ''' + status = sh(script: ''' + set +x + repo_name=${GIT_URL##*/} + cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} + git pull --no-edit origin main + git push --porcelain origin main + ''', returnStatus: true) + + while (status != 0){ + println("Trying again ...") + status = sh(script: ''' + set +x + repo_name=${GIT_URL##*/} + cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} + git config pull.rebase true + git pull --no-edit origin main + set -x + git push --porcelain origin main + ''', returnStatus: true) + } + sh ''' + set +x + rm -rf ${WORKSPACE}/../${BUILD_NUMBER} + ''' + + } else { + try { + sh ''' + set +x + mkdir -p ${WORKSPACE}/../${BUILD_NUMBER} + cd ${WORKSPACE}/../${BUILD_NUMBER} + git clone ${GIT_URL} + repo_name=${GIT_URL##*/} + cd ${WORKSPACE}/../${BUILD_NUMBER}/${repo_name} + git checkout main + reg=`echo ${JOB_NAME}| cut -d "/" -f2` + copy_path=${reg} + cp -r ${WORKSPACE}/${copy_path}/* ${copy_path}/ + git add ${copy_path}* + git_status=`git status --porcelain` + if [[ $git_status ]]; then + git commit -m "commit for terraform-apply build - ${BUILD_NUMBER} for "${reg}"/"${service} + git config pull.rebase true + git pull --no-edit origin main + git push origin main 
+ else + echo "Nothing to commit" + fi + cd ${WORKSPACE}/.. + rm -rf ${WORKSPACE}/../${BUILD_NUMBER} + ''' + } catch(Exception e1) { + println(e1) + sh ''' + cd ${WORKSPACE}/.. + rm -rf ${WORKSPACE}/../${BUILD_NUMBER} + exit 1 + ''' + } + + } + } + } + } + } + } +} diff --git a/jenkins_install/tf-destroy.groovy b/jenkins_install/tf-destroy.groovy new file mode 100644 index 000000000..aa0ab8afa --- /dev/null +++ b/jenkins_install/tf-destroy.groovy @@ -0,0 +1,169 @@ +/* Set the various stages of the build */ +def tf_plan = "Changes" + +pipeline { + agent any + options { + ansiColor('xterm') + } + stages { + stage('Set Environment Variables') { + steps { + script { + def fileContent = readFile "${JENKINS_HOME}/jenkins.properties" + // Split file content into lines + def lines = fileContent.readLines() + + // Process each line to extract variable name and value + def variables = [:] + lines.each { line -> + def parts = line.split('=') + if (parts.size() == 2) { + variables[parts[0].trim()] = parts[1].trim() + } + } + + def variableOds = variables['outdir_structure'].toString().replaceAll("\\[|\\]", '').replaceAll('"', '') + env.out_str = "${variableOds}" + def jobName = env.JOB_NAME + def parts = jobName.split('/') + if (env.out_str == 'Multiple_Outdir') { + // Assuming the job name format is /job//job/job_name + env.Region = parts[1] + env.Service = parts[2] + } + else { + // Assuming the job name format is /job/job_name + env.Region = parts[1] + env.Service = '' + if (env.Region == 'global') { + env.Service = 'rpc' + } + } + } + } + } + + stage('Terraform Destroy Plan') { + when { + expression { return env.GIT_BRANCH == 'origin/develop';} + } + + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + + sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform init -upgrade" + // Run Terraform plan + terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform plan -destroy", returnStdout: true).trim() + + // Check if the plan contains any changes + if (terraformPlanOutput.contains('No changes.')) { + echo 'No changes in Terraform plan. Skipping further stages.' + tf_plan = "No Changes" + } else { + // If there are changes, proceed with applying the plan + echo "Proceeding with destroy. \n${terraformPlanOutput}" + } + } + } + } + } + + /** Approval for Terraform Apply **/ + stage('Get Approval') { + when { + allOf { + expression {return env.GIT_BRANCH == 'origin/develop'; } + expression {return tf_plan == "Changes" } + expression {return currentBuild.result != "FAILURE" } + } + } + input { + message "Do you want to perform terraform destroy?" + } + steps { + echo "Approval for the Destroy Granted!" 
+ } + } + + stage('Terraform Destroy') { + when { + allOf { + expression {return env.GIT_BRANCH == 'origin/develop'; } + expression {return tf_plan == "Changes" } + expression {return currentBuild.result != "FAILURE" } + } + } + + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform destroy --auto-approve" + } + } + } + } + + /** Main branch commit to keep changes in Sync **/ + stage('Commit To Main') { + when { + allOf { + expression { return env.GIT_BRANCH == 'origin/develop'; } + expression { return tf_plan == "Changes" } + expression { return currentBuild.result != "FAILURE" } + } + } + steps { + catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { + script { + def buildDir = "${WORKSPACE}/${BUILD_NUMBER}" + // Create directory with build number + sh "mkdir -p ${buildDir}" + // Commit changes to the main branch + dir(buildDir) { + sh """ + git clone ${GIT_URL} + cd \$(ls -d */|head -n 1) + git checkout main + cd "${env.Region}/${env.Service}" + git pull --no-edit origin main + rm -f *.tfvars + git status + git add --all . + """ + + def git_status = false + while (!git_status) { + // Execute the git commands using shell + def gitResult = sh(script: """ + cd "\$(ls -d */|head -n 1)" + cd "${env.Region}/${env.Service}" + git fetch origin main + git merge origin/main + git commit -m "commit for terraform-destroy build - ${BUILD_NUMBER} for "${env.Region}"/"${env.Service} + + git push --porcelain origin main + """, returnStatus: true) + + if (gitResult == 0) { + git_status = true + } else { + echo "Git operation failed, retrying...." + sleep 3 // 3 seconds before retrying + } + } + } + } + } + } + + post { + always { + // Delete the build directory and the temporary directory + deleteDir() + } + } + } + } +}