From 525f7fda9da06788a13dfa9dc262cd4573e0792a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Mon, 24 Jun 2024 16:00:54 +0200 Subject: [PATCH 01/13] Remove unused variable REPO_TO_CHECKOUT MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- vars/environ.groovy | 2 -- 1 file changed, 2 deletions(-) diff --git a/vars/environ.groovy b/vars/environ.groovy index f3f7aac31..8462ea766 100644 --- a/vars/environ.groovy +++ b/vars/environ.groovy @@ -33,7 +33,6 @@ def set_tls_pr_environment(is_production) { set_common_environment() env.JOB_TYPE = 'PR' env.TARGET_REPO = 'tls' - env.REPO_TO_CHECKOUT = 'tls' if (is_production) { set_common_pr_production_environment() set_tls_pr_production_environment() @@ -70,7 +69,6 @@ def set_tls_release_environment() { set_common_environment() env.JOB_TYPE = 'release' env.TARGET_REPO = 'tls' - env.REPO_TO_CHECKOUT = 'tls' env.CHECKOUT_METHOD = 'parametrized' } From 722471511073c9469f051638ff2d37023a11cae1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Mon, 24 Jun 2024 17:19:13 +0200 Subject: [PATCH 02/13] Rename tls-specific methods in checkout_repo MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- vars/checkout_repo.groovy | 8 ++++---- vars/common.groovy | 4 ++-- vars/gen_jobs.groovy | 10 +++++----- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/vars/checkout_repo.groovy b/vars/checkout_repo.groovy index c61158dd1..91242ac70 100644 --- a/vars/checkout_repo.groovy +++ b/vars/checkout_repo.groovy @@ -59,7 +59,7 @@ Map checkout_report_errors(scm_config) { } } -Map checkout_repo() { +Map checkout_tls_repo() { def scm_config if (env.TARGET_REPO == 'tls' && env.CHECKOUT_METHOD == 'scm') { scm_config = scm @@ -85,8 +85,8 @@ git submodule foreach --recursive git config url.git@github.com:.insteadOf https } } -Map checkout_repo(BranchInfo info) { - Map m = checkout_repo() +Map checkout_tls_repo(BranchInfo info) { + Map m = checkout_tls_repo() write_overrides(info) return m } @@ -135,7 +135,7 @@ def checkout_mbed_os() { dir('TARGET_IGNORE/mbedtls') { deleteDir() - checkout_repo() + checkout_tls_repo() } sh """\ ulimit -f 20971520 diff --git a/vars/common.groovy b/vars/common.groovy index 2f04135be..468a5a6d0 100644 --- a/vars/common.groovy +++ b/vars/common.groovy @@ -230,7 +230,7 @@ BranchInfo get_branch_information() { dir('src') { deleteDir() - checkout_repo.checkout_repo() + checkout_repo.checkout_tls_repo() info.has_min_requirements = fileExists('scripts/min_requirements.py') @@ -280,7 +280,7 @@ BranchInfo get_branch_information() { try { dir('src') { deleteDir() - checkout_repo.checkout_repo() + checkout_repo.checkout_tls_repo() } get_docker_image(platform) def all_sh_help = sh( diff --git a/vars/gen_jobs.groovy b/vars/gen_jobs.groovy index 832150c04..4498a6bdd 100644 --- a/vars/gen_jobs.groovy +++ b/vars/gen_jobs.groovy @@ -48,7 +48,7 @@ Map> gen_simple_windows_jobs(BranchInfo info, String labe try { dir('src') { deleteDir() - checkout_repo.checkout_repo(info) + checkout_repo.checkout_tls_repo(info) timeout(time: common.perJobTimeout.time, unit: common.perJobTimeout.unit) { analysis.record_inner_timestamps('windows', label) { @@ -136,7 +136,7 @@ Map> gen_docker_job(Map hooks, deleteDir() common.get_docker_image(platform) dir('src') { - checkout_repo.checkout_repo(info) + checkout_repo.checkout_tls_repo(info) if (hooks.post_checkout) { 
hooks.post_checkout() } @@ -258,7 +258,7 @@ scripts/min_requirements.py --user ${info.python_requirements_override_file} common.get_docker_image(platform) } dir('src') { - checkout_repo.checkout_repo(info) + checkout_repo.checkout_tls_repo(info) writeFile file: 'steps.sh', text: """\ #!/bin/sh set -eux @@ -345,7 +345,7 @@ def gen_windows_testing_job(BranchInfo info, String toolchain, String label_pref stage('checkout') { dir("src") { deleteDir() - checkout_repo.checkout_repo(info) + checkout_repo.checkout_tls_repo(info) } /* The empty files are created to re-create the directory after it * and its contents have been removed by deleteDir. */ @@ -618,7 +618,7 @@ def gen_coverity_push_jobs() { try { dir("src") { deleteDir() - checkout_repo.checkout_repo() + checkout_repo.checkout_tls_repo() sshagent([env.GIT_CREDENTIALS_ID]) { analysis.record_inner_timestamps('container-host', job_name) { sh 'git push origin HEAD:coverity_scan' From 8f9814de333310136b12ad1666b11936bb4491df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Mon, 24 Jun 2024 17:44:18 +0200 Subject: [PATCH 03/13] Remove deep references to global env.MBED_TLS_BRANCH MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Pass the branch name along the call chain instead. This is in preparation for testing multiple branches in the same job. Signed-off-by: Bence Szépkúti --- src/org/mbed/tls/jenkins/BranchInfo.groovy | 3 +++ vars/checkout_repo.groovy | 12 ++++++------ vars/common.groovy | 8 +++++--- vars/environ.groovy | 5 ----- vars/gen_jobs.groovy | 17 +++++++++-------- vars/mbedtls-pr-test-Jenkinsfile | 2 +- vars/mbedtls-release-Jenkinsfile | 2 +- vars/mbedtls.groovy | 12 ++++++------ 8 files changed, 31 insertions(+), 30 deletions(-) diff --git a/src/org/mbed/tls/jenkins/BranchInfo.groovy b/src/org/mbed/tls/jenkins/BranchInfo.groovy index 080b36133..1be43e75a 100755 --- a/src/org/mbed/tls/jenkins/BranchInfo.groovy +++ b/src/org/mbed/tls/jenkins/BranchInfo.groovy @@ -1,6 +1,9 @@ package org.mbed.tls.jenkins class BranchInfo { + /** The name of the branch */ + public String branch + /* Map from component name to chosen platform to run it, or to null * if no platform has been chosen yet. 
*/ public Map all_all_sh_components diff --git a/vars/checkout_repo.groovy b/vars/checkout_repo.groovy index 91242ac70..a99c4d8ff 100644 --- a/vars/checkout_repo.groovy +++ b/vars/checkout_repo.groovy @@ -59,12 +59,12 @@ Map checkout_report_errors(scm_config) { } } -Map checkout_tls_repo() { +Map checkout_tls_repo(String branch) { def scm_config if (env.TARGET_REPO == 'tls' && env.CHECKOUT_METHOD == 'scm') { scm_config = scm } else { - scm_config = parametrized_repo(env.MBED_TLS_REPO, env.MBED_TLS_BRANCH) + scm_config = parametrized_repo(env.MBED_TLS_REPO, branch) } // Use bilingual scripts when manipulating the git config @@ -86,7 +86,7 @@ git submodule foreach --recursive git config url.git@github.com:.insteadOf https } Map checkout_tls_repo(BranchInfo info) { - Map m = checkout_tls_repo() + Map m = checkout_tls_repo(info.branch) write_overrides(info) return m } @@ -117,7 +117,7 @@ Map parametrized_repo(String repo, String branch) { ] } -def checkout_mbed_os() { +def checkout_mbed_os(BranchInfo info) { checkout_report_errors([ scm: [ $class: 'GitSCM', @@ -130,12 +130,12 @@ def checkout_mbed_os() { ], ] ]) - if (env.MBED_TLS_BRANCH) { + if (info != null) { dir('features/mbedtls/importer') { dir('TARGET_IGNORE/mbedtls') { deleteDir() - checkout_tls_repo() + checkout_tls_repo(info) } sh """\ ulimit -f 20971520 diff --git a/vars/common.groovy b/vars/common.groovy index 468a5a6d0..d07cdd1ec 100644 --- a/vars/common.groovy +++ b/vars/common.groovy @@ -218,8 +218,10 @@ docker run -u \$(id -u):\$(id -g) -e MAKEFLAGS -e VERBOSE_LOGS --rm --entrypoint /* Gather information about the branch that determines how to set up the * test environment. * In particular, get components of all.sh for Linux platforms. */ -BranchInfo get_branch_information() { +BranchInfo get_branch_information(String branch) { BranchInfo info = new BranchInfo() + info.branch = branch + Map jobs = [:] jobs << gen_jobs.job('all-platforms') { @@ -230,7 +232,7 @@ BranchInfo get_branch_information() { dir('src') { deleteDir() - checkout_repo.checkout_tls_repo() + checkout_repo.checkout_tls_repo(branch) info.has_min_requirements = fileExists('scripts/min_requirements.py') @@ -280,7 +282,7 @@ BranchInfo get_branch_information() { try { dir('src') { deleteDir() - checkout_repo.checkout_tls_repo() + checkout_repo.checkout_tls_repo(branch) } get_docker_image(platform) def all_sh_help = sh( diff --git a/vars/environ.groovy b/vars/environ.groovy index 8462ea766..1203a882a 100644 --- a/vars/environ.groovy +++ b/vars/environ.groovy @@ -35,7 +35,6 @@ def set_tls_pr_environment(is_production) { env.TARGET_REPO = 'tls' if (is_production) { set_common_pr_production_environment() - set_tls_pr_production_environment() } else { env.CHECKOUT_METHOD = 'parametrized' } @@ -61,10 +60,6 @@ def set_common_pr_production_environment() { } } -def set_tls_pr_production_environment() { - env.MBED_TLS_BRANCH = env.CHANGE_BRANCH -} - def set_tls_release_environment() { set_common_environment() env.JOB_TYPE = 'release' diff --git a/vars/gen_jobs.groovy b/vars/gen_jobs.groovy index 4498a6bdd..bbf91c16f 100644 --- a/vars/gen_jobs.groovy +++ b/vars/gen_jobs.groovy @@ -486,7 +486,7 @@ fi } /* Mbed OS Example job generation */ -def gen_all_example_jobs() { +def gen_all_example_jobs(BranchInfo info = null) { def jobs = [:] examples.examples.each { example -> @@ -495,6 +495,7 @@ def gen_all_example_jobs() { for (platform in example.value['platforms']()) { if (examples.raas_for_platform[platform]) { jobs = jobs + gen_mbed_os_example_job( + info, example.value['repo'], 
example.value['branch'], example.key, compiler, platform, @@ -508,7 +509,7 @@ def gen_all_example_jobs() { return jobs } -def gen_mbed_os_example_job(repo, branch, example, compiler, platform, raas) { +def gen_mbed_os_example_job(BranchInfo info, repo, branch, example, compiler, platform, raas) { def jobs = [:] def job_name = "mbed-os-${example}-${platform}-${compiler}" @@ -551,7 +552,7 @@ mbed deploy -vv """ dir('mbed-os') { deleteDir() - checkout_repo.checkout_mbed_os() + checkout_repo.checkout_mbed_os(info) /* Check that python requirements are up to date */ sh """\ ulimit -f 20971520 @@ -609,16 +610,16 @@ mbedhtrun -m ${platform} ${tag_filter} \ } } -def gen_coverity_push_jobs() { +def gen_coverity_push_jobs(BranchInfo info) { def jobs = [:] def job_name = 'coverity-push' - if (env.MBED_TLS_BRANCH == "development") { + if (info.branch == "development") { jobs << instrumented_node_job('container-host', job_name) { try { dir("src") { deleteDir() - checkout_repo.checkout_tls_repo() + checkout_repo.checkout_tls_repo(info) sshagent([env.GIT_CREDENTIALS_ID]) { analysis.record_inner_timestamps('container-host', job_name) { sh 'git push origin HEAD:coverity_scan' @@ -664,11 +665,11 @@ def gen_release_jobs(BranchInfo info, String label_prefix='', boolean run_exampl } if (run_examples) { - jobs = jobs + gen_all_example_jobs() + jobs = jobs + gen_all_example_jobs(info) } if (env.PUSH_COVERITY == "true") { - jobs = jobs + gen_coverity_push_jobs() + jobs = jobs + gen_coverity_push_jobs(info) } return jobs diff --git a/vars/mbedtls-pr-test-Jenkinsfile b/vars/mbedtls-pr-test-Jenkinsfile index 7e430a668..fc1f1a7fd 100644 --- a/vars/mbedtls-pr-test-Jenkinsfile +++ b/vars/mbedtls-pr-test-Jenkinsfile @@ -42,4 +42,4 @@ /* main job */ library identifier: 'mbedtls-test@master', retriever: legacySCM(scm) -mbedtls.run_pr_job(is_production=false) +mbedtls.run_pr_job(false, env.MBED_TLS_BRANCH) diff --git a/vars/mbedtls-release-Jenkinsfile b/vars/mbedtls-release-Jenkinsfile index 70729152e..062c2229e 100644 --- a/vars/mbedtls-release-Jenkinsfile +++ b/vars/mbedtls-release-Jenkinsfile @@ -41,4 +41,4 @@ /* main job */ library identifier: 'mbedtls-test@master', retriever: legacySCM(scm) -mbedtls.run_release_job() +mbedtls.run_release_job(env.MBED_TLS_BRANCH) diff --git a/vars/mbedtls.groovy b/vars/mbedtls.groovy index ebf9d9f43..83d8543b6 100644 --- a/vars/mbedtls.groovy +++ b/vars/mbedtls.groovy @@ -53,7 +53,7 @@ void run_tls_tests(BranchInfo info, String label_prefix='') { } /* main job */ -def run_pr_job(is_production=true) { +void run_pr_job(boolean is_production, String branch) { analysis.main_record_timestamps('run_pr_job') { if (is_production) { // Cancel in-flight jobs for the same PR when a new job is launched @@ -114,7 +114,7 @@ def run_pr_job(is_production=true) { common.init_docker_images() stage('pre-test-checks') { - info = common.get_branch_information() + info = common.get_branch_information(branch) common.check_every_all_sh_component_will_be_run(info) } } catch (err) { @@ -142,17 +142,17 @@ def run_pr_job(is_production=true) { /* main job */ def run_job() { - run_pr_job() + run_pr_job(true, env.CHANGE_BRANCH) } -void run_release_job() { +void run_release_job(String branch) { BranchInfo info analysis.main_record_timestamps('run_release_job') { try { environ.set_tls_release_environment() common.init_docker_images() stage('branch-info') { - info = common.get_branch_information() + info = common.get_branch_information(branch) } try { stage('tls-testing') { @@ -172,7 +172,7 @@ void run_release_job() { 
stage('email-report') { if (currentBuild.rawBuild.causes[0] instanceof ParameterizedTimerTriggerCause || currentBuild.rawBuild.causes[0] instanceof TimerTrigger.TimerTriggerCause) { - common.send_email('Mbed TLS nightly tests', env.MBED_TLS_BRANCH, gen_jobs.failed_builds, gen_jobs.coverage_details) + common.send_email('Mbed TLS nightly tests', branch, gen_jobs.failed_builds, gen_jobs.coverage_details) } } } From 86f2433dad1860d4e552eede1bba526322684f46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Mon, 24 Jun 2024 17:52:23 +0200 Subject: [PATCH 04/13] JavaDoc-ify BranchInfo comments MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- src/org/mbed/tls/jenkins/BranchInfo.groovy | 30 +++++++++++----------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/org/mbed/tls/jenkins/BranchInfo.groovy b/src/org/mbed/tls/jenkins/BranchInfo.groovy index 1be43e75a..39cb67615 100755 --- a/src/org/mbed/tls/jenkins/BranchInfo.groovy +++ b/src/org/mbed/tls/jenkins/BranchInfo.groovy @@ -4,27 +4,27 @@ class BranchInfo { /** The name of the branch */ public String branch - /* Map from component name to chosen platform to run it, or to null - * if no platform has been chosen yet. */ + /** Map from component name to chosen platform to run it, or to null + * if no platform has been chosen yet. */ public Map all_all_sh_components - /* Whether scripts/min_requirements.py is available. Older branches don't - * have it, so they only get what's hard-coded in the docker files on Linux, - * and bare python on other platforms. */ + /** Whether scripts/min_requirements.py is available. Older branches don't + * have it, so they only get what's hard-coded in the docker files on Linux, + * and bare python on other platforms. */ public boolean has_min_requirements - /* Ad hoc overrides for scripts/ci.requirements.txt, used to adjust - * requirements on older branches that broke due to updates of the - * required packages. - * Only used if has_min_requirements is true. */ + /** Ad hoc overrides for scripts/ci.requirements.txt, used to adjust + * requirements on older branches that broke due to updates of the + * required packages. + * Only used if {@link #has_min_requirements} is {@code true}. */ public String python_requirements_override_content - /* Name of the file containing python_requirements_override_content. - * The string is injected into Unix sh and Windows cmd command lines, - * so it must not contain any shell escapes or directory separators. - * Only used if has_min_requirements is true. - * Set to an empty string for convenience if no override is to be - * done. */ + /** Name of the file containing python_requirements_override_content. + * The string is injected into Unix sh and Windows cmd command lines, + * so it must not contain any shell escapes or directory separators. + * Only used if {@link #has_min_requirements} is {@code true}. + * Set to an empty string for convenience if no override is to be + * done. */ public String python_requirements_override_file BranchInfo() { From a5bc97bb4a970d45bb5b07c6d604c043ad5f02cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Fri, 28 Jun 2024 16:54:02 +0200 Subject: [PATCH 05/13] Support testing multiple branches at once MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Each branch listed in MBED_TLS_BRANCH (separated by commas) will be checked out and tested in parallel. 
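For illustration only, the fan-out this enables is roughly the following plain-Groovy sketch. The branch list and the println body are placeholders, not the real pipeline code, which builds the map with gen_jobs.job() and hands it to parallel():

    // Sketch: a comma-separated branch parameter becomes one parallel entry per branch.
    def branches = 'development,mbedtls-3.6'.split(',') as List

    def jobs = branches.collectEntries { branch ->
        [(branch): { println "running tests for $branch" }]   // placeholder job body
    }

    // The Jenkinsfile passes this map to parallel(); a plain-Groovy stand-in:
    jobs.each { name, body -> body() }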
Signed-off-by: Bence Szépkúti --- vars/gen_jobs.groovy | 10 ++++---- vars/mbedtls.groovy | 57 +++++++++++++++++++++++++++++++++----------- 2 files changed, 48 insertions(+), 19 deletions(-) diff --git a/vars/gen_jobs.groovy b/vars/gen_jobs.groovy index bbf91c16f..47168465b 100644 --- a/vars/gen_jobs.groovy +++ b/vars/gen_jobs.groovy @@ -438,8 +438,8 @@ def gen_windows_jobs(BranchInfo info, String label_prefix='') { return jobs } -def gen_abi_api_checking_job(BranchInfo info, String platform) { - String job_name = 'ABI-API-checking' +def gen_abi_api_checking_job(BranchInfo info, String platform, String label_prefix = '') { + String job_name = "${label_prefix}ABI-API-checking" String script_in_docker = ''' tests/scripts/list-identifiers.sh --internal scripts/abi_check.py -o FETCH_HEAD -n HEAD -s identifiers --brief @@ -458,8 +458,8 @@ scripts/abi_check.py -o FETCH_HEAD -n HEAD -s identifiers --brief post_checkout: post_checkout) } -def gen_code_coverage_job(BranchInfo info, String platform) { - String job_name = 'code-coverage' +def gen_code_coverage_job(BranchInfo info, String platform, String label_prefix='') { + String job_name = "${label_prefix}code-coverage" String script_in_docker = ''' if grep -q -F coverage-summary.txt tests/scripts/basic-build-test.sh; then # New basic-build-test, generates coverage-summary.txt @@ -642,7 +642,7 @@ def gen_release_jobs(BranchInfo info, String label_prefix='', boolean run_exampl def jobs = [:] if (env.RUN_BASIC_BUILD_TEST == "true") { - jobs = jobs + gen_code_coverage_job(info, 'ubuntu-16.04-amd64'); + jobs = jobs + gen_code_coverage_job(info, 'ubuntu-16.04-amd64', label_prefix); } if (env.RUN_ALL_SH == "true") { diff --git a/vars/mbedtls.groovy b/vars/mbedtls.groovy index 83d8543b6..8f2af242a 100644 --- a/vars/mbedtls.groovy +++ b/vars/mbedtls.groovy @@ -27,14 +27,17 @@ import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException import org.mbed.tls.jenkins.BranchInfo -void run_tls_tests(BranchInfo info, String label_prefix='') { +void run_tls_tests(Collection infos) { try { def jobs = [:] - jobs = jobs + gen_jobs.gen_release_jobs(info, label_prefix, false) + infos.each { info -> + def label_prefix = infos.size() > 1 ? 
"$info.branch-" : '' + jobs << gen_jobs.gen_release_jobs(info, label_prefix, false) - if (env.RUN_ABI_CHECK == "true") { - jobs = jobs + gen_jobs.gen_abi_api_checking_job(info, 'ubuntu-18.04-amd64') + if (env.RUN_ABI_CHECK == "true") { + jobs << gen_jobs.gen_abi_api_checking_job(info, 'ubuntu-18.04-amd64', label_prefix) + } } jobs = common.wrap_report_errors(jobs) @@ -53,7 +56,11 @@ void run_tls_tests(BranchInfo info, String label_prefix='') { } /* main job */ -void run_pr_job(boolean is_production, String branch) { +void run_pr_job(boolean is_production, String branches) { + run_pr_job(is_production, branches.split(',') as List) +} + +void run_pr_job(boolean is_production, List branches) { analysis.main_record_timestamps('run_pr_job') { if (is_production) { // Cancel in-flight jobs for the same PR when a new job is launched @@ -94,7 +101,7 @@ void run_pr_job(boolean is_production, String branch) { } } - BranchInfo info + Map infos try { common.maybe_notify_github('PENDING', 'In progress') @@ -114,8 +121,15 @@ void run_pr_job(boolean is_production, String branch) { common.init_docker_images() stage('pre-test-checks') { - info = common.get_branch_information(branch) - common.check_every_all_sh_component_will_be_run(info) + def pre_test_checks = branches.collectEntries { + branch -> gen_jobs.job(branch) { + BranchInfo info = common.get_branch_information(branch) + common.check_every_all_sh_component_will_be_run(info) + return info + } + } + pre_test_checks.failFast = false + infos = parallel(pre_test_checks) } } catch (err) { def description = 'Pre-test checks failed.' @@ -128,7 +142,7 @@ void run_pr_job(boolean is_production, String branch) { try { stage('tls-testing') { - run_tls_tests(info) + run_tls_tests(infos.values()) } } finally { stage('result-analysis') { @@ -145,18 +159,33 @@ def run_job() { run_pr_job(true, env.CHANGE_BRANCH) } -void run_release_job(String branch) { - BranchInfo info +void run_release_job(String branches) { + run_release_job(branches.split(',') as List) +} + +void run_release_job(List branches) { analysis.main_record_timestamps('run_release_job') { try { environ.set_tls_release_environment() common.init_docker_images() + + Map infos stage('branch-info') { - info = common.get_branch_information(branch) + def branch_info_jobs = branches.collectEntries { + branch -> gen_jobs.job(branch) { + return common.get_branch_information(branch) + } + } + branch_info_jobs.failFast = false + infos = parallel(branch_info_jobs) } try { stage('tls-testing') { - def jobs = common.wrap_report_errors(gen_jobs.gen_release_jobs(info)) + def jobs = infos.collectEntries { branch, info -> + String prefix = branches.size() > 1 ? 
"$branch-" : '' + return gen_jobs.gen_release_jobs(info, prefix) + } + jobs = common.wrap_report_errors(jobs) jobs.failFast = false analysis.record_inner_timestamps('main', 'run_release_job') { parallel jobs @@ -172,7 +201,7 @@ void run_release_job(String branch) { stage('email-report') { if (currentBuild.rawBuild.causes[0] instanceof ParameterizedTimerTriggerCause || currentBuild.rawBuild.causes[0] instanceof TimerTrigger.TimerTriggerCause) { - common.send_email('Mbed TLS nightly tests', branch, gen_jobs.failed_builds, gen_jobs.coverage_details) + common.send_email('Mbed TLS nightly tests', branches.join(','), gen_jobs.failed_builds, gen_jobs.coverage_details) } } } From c81b708abdb17b82d1ef73c2bf6c29bb031429ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Tue, 2 Jul 2024 11:17:44 +0200 Subject: [PATCH 06/13] Add framework testing jobs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- vars/checkout_repo.groovy | 11 +++++++++++ vars/environ.groovy | 7 +++++-- vars/mbedtls-pr-test-Jenkinsfile | 4 +++- vars/mbedtls-release-Jenkinsfile | 2 ++ vars/mbedtls.groovy | 16 ++++++++++------ 5 files changed, 31 insertions(+), 9 deletions(-) diff --git a/vars/checkout_repo.groovy b/vars/checkout_repo.groovy index a99c4d8ff..0276489a6 100644 --- a/vars/checkout_repo.groovy +++ b/vars/checkout_repo.groovy @@ -73,6 +73,17 @@ Map checkout_tls_repo(String branch) { sh_or_bat 'git config --global url.git@github.com:.insteadOf https://github.com/' try { def result = checkout_report_errors(scm_config) + + dir('framework') { + if (env.TARGET_REPO == 'framework' && env.CHECKOUT_METHOD == 'scm') { + checkout_report_errors(scm) + } else if (env.FRAMEWORK_REPO && env.FRAMEWORK_BRANCH) { + checkout_report_errors(parametrized_repo(env.FRAMEWORK_REPO, env.FRAMEWORK_BRANCH)) + } else { + echo 'Using default framework version' + } + } + // After the clone, replicate it in the local config, so it is effective when running inside docker sh_or_bat ''' git config url.git@github.com:.insteadOf https://github.com/ && \ diff --git a/vars/environ.groovy b/vars/environ.groovy index 1203a882a..22a260d70 100644 --- a/vars/environ.groovy +++ b/vars/environ.groovy @@ -29,11 +29,14 @@ def set_common_environment() { env.VERBOSE_LOGS=1 } -def set_tls_pr_environment(is_production) { +void set_pr_environment(String target_repo, boolean is_production) { set_common_environment() env.JOB_TYPE = 'PR' - env.TARGET_REPO = 'tls' + env.TARGET_REPO = target_repo if (is_production) { + if (target_repo == 'framework') { + env.MBED_TLS_REPO = 'git@github.com:Mbed-TLS/mbedtls.git' + } set_common_pr_production_environment() } else { env.CHECKOUT_METHOD = 'parametrized' diff --git a/vars/mbedtls-pr-test-Jenkinsfile b/vars/mbedtls-pr-test-Jenkinsfile index fc1f1a7fd..938ee4e7a 100644 --- a/vars/mbedtls-pr-test-Jenkinsfile +++ b/vars/mbedtls-pr-test-Jenkinsfile @@ -25,6 +25,8 @@ * Repos and branches * - MBED_TLS_REPO * - MBED_TLS_BRANCH + * - FRAMEWORK_REPO + * - FRAMEWORK_BRANCH * * Test options * - RUN_FREEBSD @@ -42,4 +44,4 @@ /* main job */ library identifier: 'mbedtls-test@master', retriever: legacySCM(scm) -mbedtls.run_pr_job(false, env.MBED_TLS_BRANCH) +mbedtls.run_pr_job('tls', false, env.MBED_TLS_BRANCH) diff --git a/vars/mbedtls-release-Jenkinsfile b/vars/mbedtls-release-Jenkinsfile index 062c2229e..bc07c083b 100644 --- a/vars/mbedtls-release-Jenkinsfile +++ b/vars/mbedtls-release-Jenkinsfile @@ -25,6 +25,8 @@ * Repos and branches * - 
MBED_TLS_REPO * - MBED_TLS_BRANCH + * - FRAMEWORK_REPO + * - FRAMEWORK_BRANCH * * Test options * - RUN_BASIC_BUILD_TEST diff --git a/vars/mbedtls.groovy b/vars/mbedtls.groovy index 8f2af242a..e08772f97 100644 --- a/vars/mbedtls.groovy +++ b/vars/mbedtls.groovy @@ -56,11 +56,11 @@ void run_tls_tests(Collection infos) { } /* main job */ -void run_pr_job(boolean is_production, String branches) { - run_pr_job(is_production, branches.split(',') as List) +void run_pr_job(String target_repo, boolean is_production, String branches) { + run_pr_job(target_repo, is_production, branches.split(',') as List) } -void run_pr_job(boolean is_production, List branches) { +void run_pr_job(String target_repo, boolean is_production, List branches) { analysis.main_record_timestamps('run_pr_job') { if (is_production) { // Cancel in-flight jobs for the same PR when a new job is launched @@ -91,7 +91,7 @@ void run_pr_job(boolean is_production, List branches) { ]) } - environ.set_tls_pr_environment(is_production) + environ.set_pr_environment(target_repo, is_production) boolean is_merge_queue = env.BRANCH_NAME ==~ /gh-readonly-queue\/.*/ if (!is_merge_queue && currentBuild.rawBuild.getCause(Cause.UserIdCause) == null) { @@ -155,8 +155,12 @@ void run_pr_job(boolean is_production, List branches) { } /* main job */ -def run_job() { - run_pr_job(true, env.CHANGE_BRANCH) +void run_job() { + run_pr_job('tls', true, env.CHANGE_BRANCH) +} + +void run_framework_pr_job() { + run_pr_job('framework', true, ['development', 'mbedtls-3.6']) } void run_release_job(String branches) { From 9495321567c4683596176e835de3ca3d7d8c004d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Wed, 3 Jul 2024 13:47:23 +0200 Subject: [PATCH 07/13] Fix the exception type thrown by JobTimestamps MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit IllegalAccessError doesn't match the semantics used here. Signed-off-by: Bence Szépkúti --- src/org/mbed/tls/jenkins/JobTimestamps.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/org/mbed/tls/jenkins/JobTimestamps.groovy b/src/org/mbed/tls/jenkins/JobTimestamps.groovy index 5108245ba..5bc0606fb 100755 --- a/src/org/mbed/tls/jenkins/JobTimestamps.groovy +++ b/src/org/mbed/tls/jenkins/JobTimestamps.groovy @@ -55,7 +55,7 @@ class JobTimestamps { private static void set(String name, AtomicLong var, long val) { if (!var.compareAndSet(-1, val)) { - throw new IllegalAccessError("$name set twice") + throw new IllegalStateException("$name set twice") } } From 33efbb8388fad38d3417c4068ce418ca477d7190 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Wed, 3 Jul 2024 20:58:54 +0200 Subject: [PATCH 08/13] Set GIT_CREDENTIALS_ID in production jobs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fixes checkouts for production jobs that target the framework repo. Signed-off-by: Bence Szépkúti --- vars/environ.groovy | 4 ++++ vars/gen_jobs.groovy | 5 +---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/vars/environ.groovy b/vars/environ.groovy index 22a260d70..37b074a6c 100644 --- a/vars/environ.groovy +++ b/vars/environ.groovy @@ -45,6 +45,10 @@ void set_pr_environment(String target_repo, boolean is_production) { def set_common_pr_production_environment() { env.CHECKOUT_METHOD = 'scm' + /* The credentials here are the SSH credentials for accessing the repositories.
+ They are defined at {JENKINS_URL}/credentials + This is a temporary workaround, this should really be set in the Jenkins job configs */ + env.GIT_CREDENTIALS_ID = common.is_open_ci_env ? "mbedtls-github-ssh" : "742b7080-e1cc-41c6-bf55-efb72013bc28" if (env.BRANCH_NAME ==~ /PR-\d+-merge/) { env.RUN_ABI_CHECK = 'true' } else { diff --git a/vars/gen_jobs.groovy b/vars/gen_jobs.groovy index 47168465b..a5715c219 100644 --- a/vars/gen_jobs.groovy +++ b/vars/gen_jobs.groovy @@ -445,11 +445,8 @@ tests/scripts/list-identifiers.sh --internal scripts/abi_check.py -o FETCH_HEAD -n HEAD -s identifiers --brief ''' - String credentials_id = common.is_open_ci_env ? "mbedtls-github-ssh" : "742b7080-e1cc-41c6-bf55-efb72013bc28" Closure post_checkout = { - /* The credentials here are the SSH credentials for accessing the repositories. - They are defined at {JENKINS_URL}/credentials */ - sshagent([credentials_id]) { + sshagent([env.GIT_CREDENTIALS_ID]) { sh "git fetch --depth 1 origin ${CHANGE_TARGET}" } } From 94574a3e4f96df00ffbb2dd96fccbb511950b5cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Tue, 9 Jul 2024 16:16:18 +0200 Subject: [PATCH 09/13] Handle multiple branches in analyze_results() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit also refactors failed_builds and outcome_stashes to be per-branch values by moving them inside BranchInfo. Signed-off-by: Bence Szépkúti --- src/org/mbed/tls/jenkins/BranchInfo.groovy | 6 ++ vars/analysis.groovy | 111 +++++++++++---------- vars/common.groovy | 5 +- vars/gen_jobs.groovy | 22 ++-- vars/mbedtls.groovy | 14 ++- 5 files changed, 84 insertions(+), 74 deletions(-) diff --git a/src/org/mbed/tls/jenkins/BranchInfo.groovy b/src/org/mbed/tls/jenkins/BranchInfo.groovy index 39cb67615..9cdf525a8 100755 --- a/src/org/mbed/tls/jenkins/BranchInfo.groovy +++ b/src/org/mbed/tls/jenkins/BranchInfo.groovy @@ -27,10 +27,16 @@ class BranchInfo { * done. */ public String python_requirements_override_file + /** Keep track of builds that fail */ + final Set failed_builds + final Set outcome_stashes + BranchInfo() { this.all_all_sh_components = [:] this.has_min_requirements = false this.python_requirements_override_content = '' this.python_requirements_override_file = '' + this.failed_builds = [] + this.outcome_stashes = [] } } diff --git a/vars/analysis.groovy b/vars/analysis.groovy index b16a15c56..fb5c40bfa 100644 --- a/vars/analysis.groovy +++ b/vars/analysis.groovy @@ -41,9 +41,6 @@ import net.sf.json.JSONObject import org.mbed.tls.jenkins.BranchInfo import org.mbed.tls.jenkins.JobTimestamps -// A static field has its content preserved across stages. -@Field static outcome_stashes = [] - @Field private static ConcurrentMap> timestamps = new ConcurrentHashMap>(); @@ -217,71 +214,79 @@ void gather_timestamps() { } } -def stash_outcomes(job_name) { +void stash_outcomes(BranchInfo info, String job_name) { def stash_name = job_name + '-outcome' if (findFiles(glob: '*-outcome.csv')) { stash(name: stash_name, includes: '*-outcome.csv', allowEmpty: true) - outcome_stashes.add(stash_name) + info.outcome_stashes.add(stash_name) } } /** Process the outcome files from all the jobs */ -def analyze_results(BranchInfo info) { - // After running a partial run, there may not be any outcome file. - // In this case do nothing. - if (outcome_stashes.isEmpty()) { - echo 'outcome_stashes is empty, skipping result-analysis.' 
- return - } +void analyze_results(Collection infos) { + def job_map = infos.collectEntries { info -> + // After running a partial run, there may not be any outcome file. + // In this case do nothing. + // Set.isEmpty() seems bugged, use Groovy truth instead + if (!info.outcome_stashes) { + echo "outcome_stashes for branch $info.branch is empty, skipping result-analysis." + return [:] + } - String job_name = 'result-analysis' + String prefix = infos.size() > 1 ? "$info.branch-" : '' + String job_name = "${prefix}result-analysis" + String outcomes_csv = "${prefix}outcomes.csv" + String failures_csv = "${prefix}failures.csv" - Closure post_checkout = { - dir('csvs') { - for (stash_name in outcome_stashes) { - unstash(stash_name) + Closure post_checkout = { + dir('csvs') { + for (stash_name in info.outcome_stashes) { + unstash(stash_name) + } + sh "cat *.csv >'../$outcomes_csv'" + deleteDir() } - sh 'cat *.csv >../outcomes.csv' - deleteDir() - } - // The complete outcome file is 2.1GB uncompressed / 56MB compressed as I write. - // Often we just want the failures, so make an artifact with just those. - // Only produce a failure file if there was a failing job (otherwise - // we'd just waste time creating an empty file). - // - // Note that grep ';FAIL;' could pick up false positives, if another field such - // as test description or test suite was "FAIL". - if (gen_jobs.failed_builds) { - sh '''\ - LC_ALL=C grep ';FAIL;' outcomes.csv >"failures.csv" || [ $? -eq 1 ] - # Compress the failure list if it is large (for some value of large) - if [ "$(wc -c '$failures_csv' || [ \$? -eq 1 ] +# Compress the failure list if it is large (for some value of large) +if [ "\$(wc -c <'$failures_csv')" -gt 99999 ]; then + xz -0 -T0 '$failures_csv' +fi +""" + } } - } - String script_in_docker = '''\ -tests/scripts/analyze_outcomes.py outcomes.csv -''' + String script_in_docker = """\ +tests/scripts/analyze_outcomes.py '$outcomes_csv' +""" - Closure post_execution = { - sh 'xz -0 -T0 outcomes.csv' - archiveArtifacts(artifacts: 'outcomes.csv.xz, failures.csv*', - fingerprint: true, - allowEmptyArchive: true) - } + Closure post_execution = { + sh "xz -0 -T0 '$outcomes_csv'" + archiveArtifacts(artifacts: "${outcomes_csv}.xz, ${failures_csv}*", + fingerprint: true, + allowEmptyArchive: true) + } - def job_map = gen_jobs.gen_docker_job(info, - job_name, - 'helper-container-host', - 'ubuntu-22.04-amd64', - script_in_docker, - post_checkout: post_checkout, - post_execution: post_execution) - common.report_errors(job_name, job_map[job_name]) + return gen_jobs.gen_docker_job(info, + job_name, + 'helper-container-host', + 'ubuntu-22.04', + script_in_docker, + post_checkout: post_checkout, + post_execution: post_execution) + } + job_map = common.wrap_report_errors(job_map) + job_map.failFast = false + parallel(job_map) } diff --git a/vars/common.groovy b/vars/common.groovy index d07cdd1ec..045dd83a5 100644 --- a/vars/common.groovy +++ b/vars/common.groovy @@ -407,10 +407,9 @@ done ) } -def send_email(name, branch, failed_builds, coverage_details) { +void send_email(String name, String branch, Collection failed_builds, Map coverage_details) { if (failed_builds) { - keys = failed_builds.keySet() - failures = keys.join(", ") + failures = failed_builds.join(", ") emailbody = """ ${coverage_details['coverage']} diff --git a/vars/gen_jobs.groovy b/vars/gen_jobs.groovy index a5715c219..9c65f091d 100644 --- a/vars/gen_jobs.groovy +++ b/vars/gen_jobs.groovy @@ -26,10 +26,6 @@ import hudson.AbortException import 
org.mbed.tls.jenkins.BranchInfo -// Keep track of builds that fail. -// Use static field, so the is content preserved across stages. -@Field static failed_builds = [:] - //Record coverage details for reporting @Field coverage_details = ['coverage': 'Code coverage job did not run'] @@ -57,7 +53,7 @@ Map> gen_simple_windows_jobs(BranchInfo info, String labe } } } catch (err) { - failed_builds[label] = true + info.failed_builds << label throw (err) } finally { deleteDir() @@ -115,8 +111,8 @@ def platform_lacks_tls_tools(platform) { *
{@code post_execution}
* Hook that runs after running the script in Docker, * whether it succeeded or not. It can check the job's status by querying - * {@code gen_jobs.failed_builds[job_name]}, which is true if the job failed and - * absent otherwise. This hook should not throw an exception. + * {@link BranchInfo#failed_builds}, which contains {@code job_name} + * if the job failed. This hook should not throw an exception. *
* * @@ -171,7 +167,7 @@ fi } } } catch (err) { - failed_builds[job_name] = true + info.failed_builds << job_name throw (err) } finally { if (hooks.post_execution) { @@ -287,7 +283,7 @@ ${extra_setup_code} } } finally { dir('src') { - analysis.stash_outcomes(job_name) + analysis.stash_outcomes(info, job_name) } dir('src/tests/') { common.archive_zipped_log_files(job_name) @@ -295,7 +291,7 @@ ${extra_setup_code} } } } catch (err) { - failed_builds[job_name] = true + info.failed_builds << job_name throw (err) } finally { deleteDir() @@ -396,7 +392,7 @@ def gen_windows_testing_job(BranchInfo info, String toolchain, String label_pref } } } catch (exception) { - failed_builds[job_name] = true + info.failed_builds << job_name return exception } return null @@ -599,7 +595,7 @@ mbedhtrun -m ${platform} ${tag_filter} \ } } } catch (err) { - failed_builds[job_name] = true + info.failed_builds << job_name throw (err) } finally { deleteDir() @@ -624,7 +620,7 @@ def gen_coverity_push_jobs(BranchInfo info) { } } } catch (err) { - failed_builds[job_name]= true + info.failed_builds << job_name throw (err) } finally { deleteDir() diff --git a/vars/mbedtls.groovy b/vars/mbedtls.groovy index e08772f97..18b318eaa 100644 --- a/vars/mbedtls.groovy +++ b/vars/mbedtls.groovy @@ -47,7 +47,7 @@ void run_tls_tests(Collection infos) { parallel jobs } } catch (err) { - def failed_names = gen_jobs.failed_builds.keySet().sort().join(" ") + def failed_names = infos.collectMany({ info -> info.failed_builds}).sort().join(" ") echo "Caught: ${err}" echo "Failed jobs: ${failed_names}" common.maybe_notify_github('FAILURE', "Failures: ${failed_names}") @@ -146,7 +146,7 @@ void run_pr_job(String target_repo, boolean is_production, List branches } } finally { stage('result-analysis') { - analysis.analyze_results(info) + analysis.analyze_results(infos.values()) } } @@ -169,11 +169,11 @@ void run_release_job(String branches) { void run_release_job(List branches) { analysis.main_record_timestamps('run_release_job') { + Map infos try { environ.set_tls_release_environment() common.init_docker_images() - Map infos stage('branch-info') { def branch_info_jobs = branches.collectEntries { branch -> gen_jobs.job(branch) { @@ -198,14 +198,18 @@ void run_release_job(List branches) { } finally { stage('result-analysis') { - analysis.analyze_results(info) + analysis.analyze_results(infos.values()) } } } finally { stage('email-report') { if (currentBuild.rawBuild.causes[0] instanceof ParameterizedTimerTriggerCause || currentBuild.rawBuild.causes[0] instanceof TimerTrigger.TimerTriggerCause) { - common.send_email('Mbed TLS nightly tests', branches.join(','), gen_jobs.failed_builds, gen_jobs.coverage_details) + common.send_email('Mbed TLS nightly tests', + branches.join(','), + infos.values().collectMany { info -> info.failed_builds }, + gen_jobs.coverage_details + ) } } } From dc9d9845b59084cad52c1bd29db832f453db7586 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Tue, 9 Jul 2024 16:37:54 +0200 Subject: [PATCH 10/13] Refactor send_email to take a BranchInfo value MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- vars/common.groovy | 6 ++++-- vars/mbedtls.groovy | 3 +-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/vars/common.groovy b/vars/common.groovy index 045dd83a5..4e9f86bfa 100644 --- a/vars/common.groovy +++ b/vars/common.groovy @@ -407,7 +407,9 @@ done ) } -void send_email(String name, String branch, Collection 
failed_builds, Map coverage_details) { +void send_email(String name, Collection infos, Map coverage_details) { + String branches = infos*.branch.join(',') + def failed_builds = infos.collectMany { info -> info.failed_builds} if (failed_builds) { failures = failed_builds.join(", ") emailbody = """ @@ -427,7 +429,7 @@ Logs: ${env.BUILD_URL} recipients = env.TEST_PASS_EMAIL_ADDRESS } subject = ((is_open_ci_env ? "TF Open CI" : "Internal CI") + " ${name} " + \ - (failed_builds ? "failed" : "passed") + "! (branch: ${branch})") + (failed_builds ? "failed" : "passed") + "! (branches: ${branches})") echo subject echo emailbody emailext body: emailbody, diff --git a/vars/mbedtls.groovy b/vars/mbedtls.groovy index 18b318eaa..dbd401321 100644 --- a/vars/mbedtls.groovy +++ b/vars/mbedtls.groovy @@ -206,8 +206,7 @@ void run_release_job(List branches) { if (currentBuild.rawBuild.causes[0] instanceof ParameterizedTimerTriggerCause || currentBuild.rawBuild.causes[0] instanceof TimerTrigger.TimerTriggerCause) { common.send_email('Mbed TLS nightly tests', - branches.join(','), - infos.values().collectMany { info -> info.failed_builds }, + infos.values(), gen_jobs.coverage_details ) } From 8d9b2b78df05a3bd0ef90e4cfe021373fc65c652 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Tue, 9 Jul 2024 16:47:10 +0200 Subject: [PATCH 11/13] Convert coverage_details into a per-branch value MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- src/org/mbed/tls/jenkins/BranchInfo.groovy | 4 ++++ vars/common.groovy | 7 ++++--- vars/gen_jobs.groovy | 7 +------ 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/org/mbed/tls/jenkins/BranchInfo.groovy b/src/org/mbed/tls/jenkins/BranchInfo.groovy index 9cdf525a8..7384a852b 100755 --- a/src/org/mbed/tls/jenkins/BranchInfo.groovy +++ b/src/org/mbed/tls/jenkins/BranchInfo.groovy @@ -31,6 +31,9 @@ class BranchInfo { final Set failed_builds final Set outcome_stashes + /** Record coverage details for reporting */ + String coverage_details + BranchInfo() { this.all_all_sh_components = [:] this.has_min_requirements = false @@ -38,5 +41,6 @@ class BranchInfo { this.python_requirements_override_file = '' this.failed_builds = [] this.outcome_stashes = [] + this.coverage_details = 'Code coverage job did not run' } } diff --git a/vars/common.groovy b/vars/common.groovy index 4e9f86bfa..746d6442d 100644 --- a/vars/common.groovy +++ b/vars/common.groovy @@ -407,13 +407,14 @@ done ) } -void send_email(String name, Collection infos, Map coverage_details) { +void send_email(String name, Collection infos) { String branches = infos*.branch.join(',') def failed_builds = infos.collectMany { info -> info.failed_builds} + String coverage_details = infos.collect({info -> "$info.branch:\n$info.coverage_details"}).join('\n\n') if (failed_builds) { failures = failed_builds.join(", ") emailbody = """ -${coverage_details['coverage']} +$coverage_details Logs: ${env.BUILD_URL} @@ -422,7 +423,7 @@ Failures: ${failures} recipients = env.TEST_FAIL_EMAIL_ADDRESS } else { emailbody = """ -${coverage_details['coverage']} +$coverage_details Logs: ${env.BUILD_URL} """ diff --git a/vars/gen_jobs.groovy b/vars/gen_jobs.groovy index 9c65f091d..5ae589b48 100644 --- a/vars/gen_jobs.groovy +++ b/vars/gen_jobs.groovy @@ -19,16 +19,11 @@ import java.util.concurrent.Callable -import groovy.transform.Field - import net.sf.json.JSONObject import hudson.AbortException import org.mbed.tls.jenkins.BranchInfo -//Record 
coverage details for reporting -@Field coverage_details = ['coverage': 'Code coverage job did not run'] - static Map> job(String label, Closure body) { return Collections.singletonMap(label, body) } @@ -469,7 +464,7 @@ fi Closure post_success = { String coverage_log = readFile('coverage-summary.txt') - coverage_details['coverage'] = coverage_log.substring( + info.coverage_details = coverage_log.substring( coverage_log.indexOf('\nCoverage\n') + 1 ) } From 7caa948f9d2fa5ac5710e943e049daac2460922f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Thu, 24 Oct 2024 13:05:38 +0200 Subject: [PATCH 12/13] Fix platform name MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- vars/analysis.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/analysis.groovy b/vars/analysis.groovy index fb5c40bfa..ee6f0964f 100644 --- a/vars/analysis.groovy +++ b/vars/analysis.groovy @@ -281,7 +281,7 @@ tests/scripts/analyze_outcomes.py '$outcomes_csv' return gen_jobs.gen_docker_job(info, job_name, 'helper-container-host', - 'ubuntu-22.04', + 'ubuntu-22.04-amd64', script_in_docker, post_checkout: post_checkout, post_execution: post_execution) From bdc39b29c78a8ccc8215c3009bd7776a9d0a9f8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bence=20Sz=C3=A9pk=C3=BAti?= Date: Thu, 24 Oct 2024 14:27:57 +0200 Subject: [PATCH 13/13] Only compress outcomes.csv if the file exists MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Bence Szépkúti --- vars/analysis.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/analysis.groovy b/vars/analysis.groovy index ee6f0964f..89d7042d5 100644 --- a/vars/analysis.groovy +++ b/vars/analysis.groovy @@ -272,7 +272,7 @@ tests/scripts/analyze_outcomes.py '$outcomes_csv' """ Closure post_execution = { - sh "xz -0 -T0 '$outcomes_csv'" + sh "[ -f '$outcomes_csv' ] && xz -0 -T0 '$outcomes_csv'" archiveArtifacts(artifacts: "${outcomes_csv}.xz, ${failures_csv}*", fingerprint: true, allowEmptyArchive: true)
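Taken together, the result-analysis changes above prefix every per-branch artifact with its branch name once more than one branch is under test. A minimal plain-Groovy sketch of that naming scheme (the literal maps below stand in for BranchInfo objects and are not the real class):

    def infos = [[branch: 'development'], [branch: 'mbedtls-3.6']]

    infos.each { info ->
        def prefix = infos.size() > 1 ? "${info.branch}-" : ''   // empty when only one branch runs
        def outcomes_csv = "${prefix}outcomes.csv"
        def failures_csv = "${prefix}failures.csv"
        println "$outcomes_csv / $failures_csv"
    }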