Merge pull request #170 from Mbed-TLS/dev/bensze01/framework
Adapt PR and release jobs to enable testing of the Framework repo
mpg authored Nov 5, 2024
2 parents 43019ad + bdc39b2 commit b2f594f
Showing 10 changed files with 218 additions and 156 deletions.
43 changes: 28 additions & 15 deletions src/org/mbed/tls/jenkins/BranchInfo.groovy
@@ -1,33 +1,46 @@
package org.mbed.tls.jenkins

class BranchInfo {
/* Map from component name to chosen platform to run it, or to null
* if no platform has been chosen yet. */
/** The name of the branch */
public String branch

/** Map from component name to chosen platform to run it, or to null
* if no platform has been chosen yet. */
public Map<String, String> all_all_sh_components

/* Whether scripts/min_requirements.py is available. Older branches don't
* have it, so they only get what's hard-coded in the docker files on Linux,
* and bare python on other platforms. */
/** Whether scripts/min_requirements.py is available. Older branches don't
* have it, so they only get what's hard-coded in the docker files on Linux,
* and bare python on other platforms. */
public boolean has_min_requirements

/* Ad hoc overrides for scripts/ci.requirements.txt, used to adjust
* requirements on older branches that broke due to updates of the
* required packages.
* Only used if has_min_requirements is true. */
/** Ad hoc overrides for scripts/ci.requirements.txt, used to adjust
* requirements on older branches that broke due to updates of the
* required packages.
* Only used if {@link #has_min_requirements} is {@code true}. */
public String python_requirements_override_content

/* Name of the file containing python_requirements_override_content.
* The string is injected into Unix sh and Windows cmd command lines,
* so it must not contain any shell escapes or directory separators.
* Only used if has_min_requirements is true.
* Set to an empty string for convenience if no override is to be
* done. */
/** Name of the file containing python_requirements_override_content.
* The string is injected into Unix sh and Windows cmd command lines,
* so it must not contain any shell escapes or directory separators.
* Only used if {@link #has_min_requirements} is {@code true}.
* Set to an empty string for convenience if no override is to be
* done. */
public String python_requirements_override_file

/** Keep track of builds that fail */
final Set<String> failed_builds
final Set<String> outcome_stashes

/** Record coverage details for reporting */
String coverage_details

BranchInfo() {
this.all_all_sh_components = [:]
this.has_min_requirements = false
this.python_requirements_override_content = ''
this.python_requirements_override_file = ''
this.failed_builds = []
this.outcome_stashes = []
this.coverage_details = 'Code coverage job did not run'
}
}
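For reference, a minimal sketch of how these fields fit together (plain Groovy, assuming the class above is on the classpath; all values are illustrative, not taken from a real run):

def info = new BranchInfo()
info.branch = 'development'
info.all_all_sh_components['build_cmake'] = null   // no platform chosen yet
info.has_min_requirements = true
info.python_requirements_override_file = ''        // empty string: no override
info.failed_builds << 'all_sh-build_cmake'         // hypothetical failed job name
assert info.failed_builds                          // Groovy truth: non-empty Set is true

Note that the constructor's `failed_builds = []` relies on Groovy coercing the list literal to a Set because of the declared field type.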
2 changes: 1 addition & 1 deletion src/org/mbed/tls/jenkins/JobTimestamps.groovy
@@ -55,7 +55,7 @@ class JobTimestamps {

private static void set(String name, AtomicLong var, long val) {
if (!var.compareAndSet(-1, val)) {
throw new IllegalAccessError("$name set twice")
throw new IllegalStateException("$name set twice")
}
}

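The switch from IllegalAccessError to IllegalStateException is the idiomatic choice: IllegalAccessError is reserved for JVM linkage failures, whereas IllegalStateException signals a method invoked at the wrong time. A standalone sketch of the set-once pattern this helper guards (plain Groovy; the enclosing class is elided):

import java.util.concurrent.atomic.AtomicLong

// -1 is the "unset" sentinel, so compareAndSet(-1, val) succeeds exactly once.
def started = new AtomicLong(-1)

void setOnce(String name, AtomicLong var, long val) {
    if (!var.compareAndSet(-1, val)) {
        throw new IllegalStateException("$name set twice")
    }
}

setOnce('started', started, System.currentTimeMillis())
try {
    setOnce('started', started, 0)   // second write is rejected
} catch (IllegalStateException e) {
    assert e.message == 'started set twice'
}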
111 changes: 58 additions & 53 deletions vars/analysis.groovy
@@ -41,9 +41,6 @@ import net.sf.json.JSONObject
import org.mbed.tls.jenkins.BranchInfo
import org.mbed.tls.jenkins.JobTimestamps

// A static field has its content preserved across stages.
@Field static outcome_stashes = []

@Field private static ConcurrentMap<String, ConcurrentMap<String, JobTimestamps>> timestamps =
new ConcurrentHashMap<String, ConcurrentMap<String, JobTimestamps>>();

@@ -217,71 +214,79 @@ void gather_timestamps() {
}
}

def stash_outcomes(job_name) {
void stash_outcomes(BranchInfo info, String job_name) {
def stash_name = job_name + '-outcome'
if (findFiles(glob: '*-outcome.csv')) {
stash(name: stash_name,
includes: '*-outcome.csv',
allowEmpty: true)
outcome_stashes.add(stash_name)
info.outcome_stashes.add(stash_name)
}
}

/** Process the outcome files from all the jobs */
def analyze_results(BranchInfo info) {
// After running a partial run, there may not be any outcome file.
// In this case do nothing.
if (outcome_stashes.isEmpty()) {
echo 'outcome_stashes is empty, skipping result-analysis.'
return
}
void analyze_results(Collection<BranchInfo> infos) {
def job_map = infos.collectEntries { info ->
// After running a partial run, there may not be any outcome file.
// In this case do nothing.
// Set.isEmpty() seems bugged, use Groovy truth instead
if (!info.outcome_stashes) {
echo "outcome_stashes for branch $info.branch is empty, skipping result-analysis."
return [:]
}

String job_name = 'result-analysis'
String prefix = infos.size() > 1 ? "$info.branch-" : ''
String job_name = "${prefix}result-analysis"
String outcomes_csv = "${prefix}outcomes.csv"
String failures_csv = "${prefix}failures.csv"

Closure post_checkout = {
dir('csvs') {
for (stash_name in outcome_stashes) {
unstash(stash_name)
Closure post_checkout = {
dir('csvs') {
for (stash_name in info.outcome_stashes) {
unstash(stash_name)
}
sh "cat *.csv >'../$outcomes_csv'"
deleteDir()
}
sh 'cat *.csv >../outcomes.csv'
deleteDir()
}

// The complete outcome file is 2.1GB uncompressed / 56MB compressed as I write.
// Often we just want the failures, so make an artifact with just those.
// Only produce a failure file if there was a failing job (otherwise
// we'd just waste time creating an empty file).
//
// Note that grep ';FAIL;' could pick up false positives, if another field such
// as test description or test suite was "FAIL".
if (gen_jobs.failed_builds) {
sh '''\
LC_ALL=C grep ';FAIL;' outcomes.csv >"failures.csv" || [ $? -eq 1 ]
# Compress the failure list if it is large (for some value of large)
if [ "$(wc -c <failures.csv)" -gt 99999 ]; then
xz -0 -T0 failures.csv
fi
'''
// The complete outcome file is 2.1GB uncompressed / 56MB compressed as I write.
// Often we just want the failures, so make an artifact with just those.
// Only produce a failure file if there was a failing job (otherwise
// we'd just waste time creating an empty file).
//
// Note that grep ';FAIL;' could pick up false positives, if another field such
// as test description or test suite was "FAIL".
if (info.failed_builds) {
sh """\
LC_ALL=C grep ';FAIL;' outcomes.csv >'$failures_csv' || [ \$? -eq 1 ]
# Compress the failure list if it is large (for some value of large)
if [ "\$(wc -c <'$failures_csv')" -gt 99999 ]; then
xz -0 -T0 '$failures_csv'
fi
"""
}
}
}

String script_in_docker = '''\
tests/scripts/analyze_outcomes.py outcomes.csv
'''
String script_in_docker = """\
tests/scripts/analyze_outcomes.py '$outcomes_csv'
"""

Closure post_execution = {
sh 'xz -0 -T0 outcomes.csv'
archiveArtifacts(artifacts: 'outcomes.csv.xz, failures.csv*',
fingerprint: true,
allowEmptyArchive: true)
}
Closure post_execution = {
sh "[ -f '$outcomes_csv' ] && xz -0 -T0 '$outcomes_csv'"
archiveArtifacts(artifacts: "${outcomes_csv}.xz, ${failures_csv}*",
fingerprint: true,
allowEmptyArchive: true)
}

def job_map = gen_jobs.gen_docker_job(info,
job_name,
'helper-container-host',
'ubuntu-22.04-amd64',
script_in_docker,
post_checkout: post_checkout,
post_execution: post_execution)
common.report_errors(job_name, job_map[job_name])
return gen_jobs.gen_docker_job(info,
job_name,
'helper-container-host',
'ubuntu-22.04-amd64',
script_in_docker,
post_checkout: post_checkout,
post_execution: post_execution)
}
job_map = common.wrap_report_errors(job_map)
job_map.failFast = false
parallel(job_map)
}
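The per-branch prefixing above only produces distinct job names when more than one branch is under test; a self-contained sketch of the same collectEntries pattern (plain Groovy, made-up branch names):

def infos = [[branch: 'development'], [branch: 'mbedtls-3.6']]
def job_map = infos.collectEntries { info ->
    String prefix = infos.size() > 1 ? "${info.branch}-" : ''
    String job_name = "${prefix}result-analysis"
    // A branch with no outcome stashes would return [:] here, which
    // simply contributes no entries to the merged map.
    [(job_name): { println "analyze ${info.branch}" }]
}
assert job_map.keySet() == ['development-result-analysis',
                            'mbedtls-3.6-result-analysis'] as Set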
25 changes: 18 additions & 7 deletions vars/checkout_repo.groovy
@@ -59,12 +59,12 @@ Map<String, String> checkout_report_errors(scm_config) {
}
}

Map<String, String> checkout_repo() {
Map<String, String> checkout_tls_repo(String branch) {
def scm_config
if (env.TARGET_REPO == 'tls' && env.CHECKOUT_METHOD == 'scm') {
scm_config = scm
} else {
scm_config = parametrized_repo(env.MBED_TLS_REPO, env.MBED_TLS_BRANCH)
scm_config = parametrized_repo(env.MBED_TLS_REPO, branch)
}

// Use bilingual scripts when manipulating the git config
@@ -73,6 +73,17 @@ Map<String, String> checkout_repo() {
sh_or_bat 'git config --global url.git@github.com:.insteadOf https://github.com/'
try {
def result = checkout_report_errors(scm_config)

dir('framework') {
if (env.TARGET_REPO == 'framework' && env.CHECKOUT_METHOD == 'scm') {
checkout_report_errors(scm)
} else if (env.FRAMEWORK_REPO && env.FRAMEWORK_BRANCH) {
checkout_report_errors(parametrized_repo(env.FRAMEWORK_REPO, env.FRAMEWORK_BRANCH))
} else {
echo 'Using default framework version'
}
}

// After the clone, replicate it in the local config, so it is effective when running inside docker
sh_or_bat '''
git config url.git@github.com:.insteadOf https://github.com/ && \
@@ -85,8 +96,8 @@ git submodule foreach --recursive git config url.git@github.com:.insteadOf https
}
}

Map<String, String> checkout_repo(BranchInfo info) {
Map<String, String> m = checkout_repo()
Map<String, String> checkout_tls_repo(BranchInfo info) {
Map<String, String> m = checkout_tls_repo(info.branch)
write_overrides(info)
return m
}
@@ -117,7 +128,7 @@ Map<String, Object> parametrized_repo(String repo, String branch) {
]
}

def checkout_mbed_os() {
def checkout_mbed_os(BranchInfo info) {
checkout_report_errors([
scm: [
$class: 'GitSCM',
Expand All @@ -130,12 +141,12 @@ def checkout_mbed_os() {
],
]
])
if (env.MBED_TLS_BRANCH) {
if (info != null) {
dir('features/mbedtls/importer') {
dir('TARGET_IGNORE/mbedtls')
{
deleteDir()
checkout_repo()
checkout_tls_repo(info)
}
sh """\
ulimit -f 20971520
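The new framework checkout falls back in three steps: the framework PR's own head when the framework repo triggered the job, an explicit FRAMEWORK_REPO/FRAMEWORK_BRANCH pair when one is supplied, and otherwise the framework version pinned by the parent checkout. A condensed standalone rendering of that decision (a plain map stands in for the Jenkins environment; the helper name is hypothetical):

def pick_framework_checkout(Map env, Object scm) {
    if (env.TARGET_REPO == 'framework' && env.CHECKOUT_METHOD == 'scm') {
        return scm                                        // the framework PR's own head
    } else if (env.FRAMEWORK_REPO && env.FRAMEWORK_BRANCH) {
        return [repo: env.FRAMEWORK_REPO, branch: env.FRAMEWORK_BRANCH]
    } else {
        return null                                       // keep the default pinned version
    }
}

assert pick_framework_checkout([TARGET_REPO: 'tls'], null) == null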
22 changes: 13 additions & 9 deletions vars/common.groovy
@@ -224,8 +224,10 @@ docker run -u \$(id -u):\$(id -g) -e MAKEFLAGS -e VERBOSE_LOGS $env_args --rm --
/* Gather information about the branch that determines how to set up the
* test environment.
* In particular, get components of all.sh for Linux platforms. */
BranchInfo get_branch_information() {
BranchInfo get_branch_information(String branch) {
BranchInfo info = new BranchInfo()
info.branch = branch

Map<String, Object> jobs = [:]

jobs << gen_jobs.job('all-platforms') {
@@ -236,7 +238,7 @@ BranchInfo get_branch_information(String branch) {

dir('src') {
deleteDir()
checkout_repo.checkout_repo()
checkout_repo.checkout_tls_repo(branch)

info.has_min_requirements = fileExists('scripts/min_requirements.py')

@@ -286,7 +288,7 @@ BranchInfo get_branch_information(String branch) {
try {
dir('src') {
deleteDir()
checkout_repo.checkout_repo()
checkout_repo.checkout_tls_repo(branch)
}
get_docker_image(platform)
def all_sh_help = sh(
@@ -411,12 +413,14 @@ done
)
}

def send_email(name, branch, failed_builds, coverage_details) {
void send_email(String name, Collection<BranchInfo> infos) {
String branches = infos*.branch.join(',')
def failed_builds = infos.collectMany { info -> info.failed_builds}
String coverage_details = infos.collect({info -> "$info.branch:\n$info.coverage_details"}).join('\n\n')
if (failed_builds) {
keys = failed_builds.keySet()
failures = keys.join(", ")
failures = failed_builds.join(", ")
emailbody = """
${coverage_details['coverage']}
$coverage_details
Logs: ${env.BUILD_URL}
@@ -425,14 +429,14 @@ Failures: ${failures}
recipients = env.TEST_FAIL_EMAIL_ADDRESS
} else {
emailbody = """
${coverage_details['coverage']}
$coverage_details
Logs: ${env.BUILD_URL}
"""
recipients = env.TEST_PASS_EMAIL_ADDRESS
}
subject = ((is_open_ci_env ? "TF Open CI" : "Internal CI") + " ${name} " + \
(failed_builds ? "failed" : "passed") + "! (branch: ${branch})")
(failed_builds ? "failed" : "passed") + "! (branches: ${branches})")
echo subject
echo emailbody
emailext body: emailbody,
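send_email now aggregates across branches with Groovy's spread (*.) and collectMany operators; a self-contained sketch of just that aggregation (the class, branch names, failures, and coverage strings are all made up):

class FakeInfo {
    String branch
    List<String> failed_builds
    String coverage_details
}

def infos = [
    new FakeInfo(branch: 'development', failed_builds: ['msvc-2017'],
                 coverage_details: 'coverage: 80.1% of lines'),
    new FakeInfo(branch: 'mbedtls-3.6', failed_builds: [],
                 coverage_details: 'coverage: 79.4% of lines'),
]

assert infos*.branch.join(',') == 'development,mbedtls-3.6'    // spread operator
assert infos.collectMany { it.failed_builds } == ['msvc-2017'] // flattened failures
println infos.collect { "$it.branch:\n$it.coverage_details" }.join('\n\n')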
18 changes: 9 additions & 9 deletions vars/environ.groovy
@@ -29,21 +29,26 @@ def set_common_environment() {
env.VERBOSE_LOGS=1
}

def set_tls_pr_environment(is_production) {
void set_pr_environment(String target_repo, boolean is_production) {
set_common_environment()
env.JOB_TYPE = 'PR'
env.TARGET_REPO = 'tls'
env.REPO_TO_CHECKOUT = 'tls'
env.TARGET_REPO = target_repo
if (is_production) {
if (target_repo == 'framework') {
env.MBED_TLS_REPO = 'git@github.com:Mbed-TLS/mbedtls.git'
}
set_common_pr_production_environment()
set_tls_pr_production_environment()
} else {
env.CHECKOUT_METHOD = 'parametrized'
}
}

def set_common_pr_production_environment() {
env.CHECKOUT_METHOD = 'scm'
/* The credentials here are the SSH credentials for accessing the repositories.
They are defined at {JENKINS_URL}/credentials
This is a temporary workaround, this should really be set in the Jenkins job configs */
env.GIT_CREDENTIALS_ID = common.is_open_ci_env ? "mbedtls-github-ssh" : "742b7080-e1cc-41c6-bf55-efb72013bc28"
if (env.BRANCH_NAME ==~ /PR-\d+-merge/) {
env.RUN_ABI_CHECK = 'true'
} else {
@@ -62,15 +67,10 @@ def set_common_pr_production_environment() {
}
}

def set_tls_pr_production_environment() {
env.MBED_TLS_BRANCH = env.CHANGE_BRANCH
}

def set_tls_release_environment() {
set_common_environment()
env.JOB_TYPE = 'release'
env.TARGET_REPO = 'tls'
env.REPO_TO_CHECKOUT = 'tls'
env.CHECKOUT_METHOD = 'parametrized'
}

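A usage sketch of the new entry point; these call sites are assumptions, not part of this diff (each repo's Jenkinsfile would pass its own target):

environ.set_pr_environment('tls', true)        // production TLS PR job
environ.set_pr_environment('framework', true)  // framework PR job; also points
                                               // MBED_TLS_REPO at Mbed-TLS/mbedtls
environ.set_pr_environment('tls', false)       // non-production: parametrized checkout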