diff --git a/.github/workflows/reports-scheduler-test-and-build-workflow.yml b/.github/workflows/reports-scheduler-test-and-build-workflow.yml
index 327018bb..4eefba56 100644
--- a/.github/workflows/reports-scheduler-test-and-build-workflow.yml
+++ b/.github/workflows/reports-scheduler-test-and-build-workflow.yml
@@ -15,6 +15,7 @@ jobs:
         java:
           - 11
           - 17
+          - 21
     runs-on: ubuntu-latest
     container:
       # using the same image which is used by opensearch-build team to build the OpenSearch Distribution
@@ -68,6 +69,7 @@ jobs:
         java:
           - 11
           - 17
+          - 21
     runs-on: windows-latest
     steps:
diff --git a/build-tools/esplugin-coverage.gradle b/build-tools/esplugin-coverage.gradle
index 3e8815fe..a2520874 100644
--- a/build-tools/esplugin-coverage.gradle
+++ b/build-tools/esplugin-coverage.gradle
@@ -54,8 +54,8 @@ jacocoTestReport {
     sourceDirectories.from = "src/main/kotlin"
     classDirectories.from = sourceSets.main.output
     reports {
-        html.enabled = true // human readable
-        xml.enabled = true // for coverlay
+        html.required = true // human readable
+        xml.required = true // for coverlay
     }
 }
 
@@ -65,8 +65,7 @@ allprojects{
     testClusters.integTest {
         if (Os.isFamily(Os.FAMILY_WINDOWS)) {
-            // Replacing build with absolute path to fix the error "error opening zip file or JAR manifest missing : /build/tmp/expandedArchives/..../jacocoagent.jar"
-            jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('build',"${buildDir}")
+            jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}"
         } else {
             jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('javaagent:','javaagent:/')
         }
diff --git a/build-tools/pkgbuild.gradle b/build-tools/pkgbuild.gradle
index 8c4dab9b..89358425 100644
--- a/build-tools/pkgbuild.gradle
+++ b/build-tools/pkgbuild.gradle
@@ -3,7 +3,7 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-apply plugin: 'nebula.ospackage'
+apply plugin: 'com.netflix.nebula.ospackage'
 
 // This is afterEvaluate because the bundlePlugin ZIP task is updated afterEvaluate and changes the ZIP name to match the plugin name
 afterEvaluate {
@@ -40,26 +40,10 @@ afterEvaluate {
     buildRpm {
         arch = 'NOARCH'
         dependsOn 'assemble'
-        finalizedBy 'renameRpm'
-        task renameRpm(type: Copy) {
-            from("$buildDir/distributions")
-            into("$buildDir/distributions")
-            include archiveName
-            rename archiveName, "${packageName}-${version}.rpm"
-            doLast { delete file("$buildDir/distributions/$archiveName") }
-        }
     }
 
     buildDeb {
         arch = 'all'
         dependsOn 'assemble'
-        finalizedBy 'renameDeb'
-        task renameDeb(type: Copy) {
-            from("$buildDir/distributions")
-            into("$buildDir/distributions")
-            include archiveName
-            rename archiveName, "${packageName}-${version}.deb"
-            doLast { delete file("$buildDir/distributions/$archiveName") }
-        }
     }
 }
diff --git a/build.gradle b/build.gradle
index 56ea79b0..7ee2a3a6 100644
--- a/build.gradle
+++ b/build.gradle
@@ -41,13 +41,13 @@ buildscript {
         classpath "${opensearch_group}.gradle:build-tools:${opensearch_version}"
         classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:${kotlin_version}"
         classpath "org.jetbrains.kotlin:kotlin-allopen:${kotlin_version}"
-        classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.22.0"
-        classpath "org.jacoco:org.jacoco.agent:0.8.8"
+        classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.23.4"
+        classpath "org.jacoco:org.jacoco.agent:0.8.11"
     }
 }
 
 plugins {
-    id 'nebula.ospackage' version "8.3.0"
+    id "com.netflix.nebula.ospackage-base" version "11.6.0"
     id "com.dorongold.task-tree" version "1.5"
     id 'java-library'
 }
@@ -132,6 +132,8 @@ configurations.all {
     if (it.state != Configuration.State.UNRESOLVED) return
     resolutionStrategy {
         force "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}"
+        force "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${kotlin_version}"
+        force "org.jetbrains.kotlin:kotlin-reflect:${kotlin_version}"
         force "org.jetbrains.kotlin:kotlin-stdlib-common:${kotlin_version}"
         force "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.11.4"
         force "org.yaml:snakeyaml:2.0"
@@ -324,6 +326,189 @@ testClusters.integTest {
     setting 'path.repo', repo.absolutePath
 }
 
+String baseVersion = "1.3.2"
+String bwcVersion = baseVersion + ".0"
+String currentVersion = opensearch_version.replace("-SNAPSHOT","")
+String baseName = "reportsSchedulerBwcCluster"
+String bwcFilePath = "src/test/resources/bwc"
+String bwcReportsPlugin = "opensearch-reports-scheduler-" + bwcVersion + ".zip"
+String bwcJobSchedulerPlugin = "opensearch-job-scheduler-" + bwcVersion + ".zip"
+String bwcReportsSchedulerURL = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/1.3.2/latest/linux/x64/tar/builds/opensearch/plugins/" + bwcReportsPlugin
+String bwcJobSchedulerURL = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/1.3.2/latest/linux/x64/tar/builds/opensearch/plugins/" + bwcJobSchedulerPlugin
+
+2.times {i ->
+    testClusters {
+        "${baseName}$i" {
+            testDistribution = "ARCHIVE"
+            versions = [baseVersion, opensearch_version]
+            numberOfNodes = 3
+            plugin(provider(new Callable<RegularFile>(){
+                @Override
+                RegularFile call() throws Exception {
+                    return new RegularFile() {
+                        @Override
+                        File getAsFile() {
+                            File dir = new File(bwcFilePath + "/job-scheduler/" + bwcVersion)
+                            if (!dir.exists()) {
+                                dir.mkdirs()
+                            }
+                            File file = new File(dir, "opensearch-job-scheduler-" + bwcVersion + ".zip")
+                            if (!file.exists()) {
+                                new URL(bwcJobSchedulerURL).withInputStream{ ins -> file.withOutputStream{ it << ins }}
+                            }
+                            return fileTree(bwcFilePath + "/job-scheduler/" + bwcVersion).getSingleFile()
+                        }
+                    }
+                }
+            }))
+            plugin(provider(new Callable<RegularFile>(){
+                @Override
+                RegularFile call() throws Exception {
+                    return new RegularFile() {
+                        @Override
+                        File getAsFile() {
+                            File dir = new File(bwcFilePath + "/reports-scheduler/" + bwcVersion)
+                            if (!dir.exists()) {
+                                dir.mkdirs()
+                            }
+                            File file = new File(dir, "opensearch-reports-scheduler-" + bwcVersion + ".zip")
+                            if (!file.exists()) {
+                                new URL(bwcReportsSchedulerURL).withInputStream{ ins -> file.withOutputStream{ it << ins }}
+                            }
+                            return fileTree(bwcFilePath + "/reports-scheduler/" + bwcVersion).getSingleFile()
+                        }
+                    }
+                }
+            }))
+            setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
+            setting 'http.content_type.required', 'true'
+        }
+    }
+}
+
+List<Provider<RegularFile>> plugins = []
+
+// Ensure the artifact for the current project version is available to be used for the bwc tests
+task prepareBwcTests {
+    dependsOn bundle
+    doLast {
+        plugins = [
+            provider(new Callable<RegularFile>(){
+                @Override
+                RegularFile call() throws Exception {
+                    return new RegularFile() {
+                        @Override
+                        File getAsFile() {
+                            return configurations.zipArchive.asFileTree.getSingleFile()
+                        }
+                    }
+                }
+            }),
+            project.getObjects().fileProperty().value(bundle.getArchiveFile())
+        ]
+    }
+}
+
+// Create two test clusters with 3 nodes of the old version
+2.times {i ->
+    task "${baseName}#oldVersionClusterTask$i"(type: StandaloneRestIntegTestTask) {
+        dependsOn 'prepareBwcTests'
+        useCluster testClusters."${baseName}$i"
+        filter {
+            includeTestsMatching "org.opensearch.integTest.bwc.*IT"
+        }
+        systemProperty 'tests.rest.bwcsuite', 'old_cluster'
+        systemProperty 'tests.rest.bwcsuite_round', 'old'
+        systemProperty 'tests.plugin_bwc_version', bwcVersion
+        nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}$i".allHttpSocketURI.join(",")}")
+        nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}$i".getName()}")
+    }
+}
+
+// Upgrade one node of the old cluster to new OpenSearch version with upgraded plugin version.
+// This results in a mixed cluster with 2 nodes on the old version and 1 upgraded node.
+// This is also used as a one third upgraded cluster for a rolling upgrade.
+task "${baseName}#mixedClusterTask"(type: StandaloneRestIntegTestTask) {
+    useCluster testClusters."${baseName}0"
+    dependsOn "${baseName}#oldVersionClusterTask0"
+    doFirst {
+        testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins)
+    }
+    filter {
+        includeTestsMatching "org.opensearch.integTest.bwc.*IT"
+    }
+    systemProperty 'tests.rest.bwcsuite', 'mixed_cluster'
+    systemProperty 'tests.rest.bwcsuite_round', 'first'
+    systemProperty 'tests.plugin_bwc_version', bwcVersion
+    nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}")
+    nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
+}
+
+// Upgrade the second node to new OpenSearch version with upgraded plugin version after the first node is upgraded.
+// This results in a mixed cluster with 1 node on the old version and 2 upgraded nodes.
+// This is used for rolling upgrade.
+task "${baseName}#twoThirdsUpgradedClusterTask"(type: StandaloneRestIntegTestTask) {
+    dependsOn "${baseName}#mixedClusterTask"
+    useCluster testClusters."${baseName}0"
+    doFirst {
+        testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins)
+    }
+    filter {
+        includeTestsMatching "org.opensearch.integTest.bwc.*IT"
+    }
+    systemProperty 'tests.rest.bwcsuite', 'mixed_cluster'
+    systemProperty 'tests.rest.bwcsuite_round', 'second'
+    systemProperty 'tests.plugin_bwc_version', bwcVersion
+    nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}")
+    nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
+}
+
+// Upgrade the third node to new OpenSearch version with upgraded plugin version after the second node is upgraded.
+// This results in a fully upgraded cluster.
+// This is used for rolling upgrade.
+task "${baseName}#rollingUpgradeClusterTask"(type: StandaloneRestIntegTestTask) {
+    dependsOn "${baseName}#twoThirdsUpgradedClusterTask"
+    useCluster testClusters."${baseName}0"
+    doFirst {
+        testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins)
+    }
+    filter {
+        includeTestsMatching "org.opensearch.integTest.bwc.*IT"
+    }
+    mustRunAfter "${baseName}#mixedClusterTask"
+    systemProperty 'tests.rest.bwcsuite', 'mixed_cluster'
+    systemProperty 'tests.rest.bwcsuite_round', 'third'
+    systemProperty 'tests.plugin_bwc_version', bwcVersion
+    nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}")
+    nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
+}
+
+// Upgrade all the nodes of the old cluster to new OpenSearch version with upgraded plugin version
+// at the same time resulting in a fully upgraded cluster.
+task "${baseName}#fullRestartClusterTask"(type: StandaloneRestIntegTestTask) { + dependsOn "${baseName}#oldVersionClusterTask1" + useCluster testClusters."${baseName}1" + doFirst { + testClusters."${baseName}1".upgradeAllNodesAndPluginsToNextVersion(plugins) + } + filter { + includeTestsMatching "org.opensearch.integTest.bwc.*IT" + } + systemProperty 'tests.rest.bwcsuite', 'upgraded_cluster' + systemProperty 'tests.plugin_bwc_version', bwcVersion + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}1".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}1".getName()}") +} + +// A bwc test suite which runs all the bwc tasks combined +task bwcTestSuite(type: StandaloneRestIntegTestTask) { + exclude '**/*Test*' + exclude '**/*IT*' + dependsOn tasks.named("${baseName}#mixedClusterTask") + dependsOn tasks.named("${baseName}#rollingUpgradeClusterTask") + dependsOn tasks.named("${baseName}#fullRestartClusterTask") +} + task integTestRemote(type: RestIntegTestTask) { testClassesDirs = sourceSets.test.output.classesDirs classpath = sourceSets.test.runtimeClasspath @@ -355,7 +540,7 @@ run { task ktlint(type: JavaExec, group: "verification") { description = "Check Kotlin code style." - main = "com.pinterest.ktlint.Main" + mainClass = "com.pinterest.ktlint.Main" classpath = configurations.ktlint args "src/**/*.kt" // to generate report in checkstyle format prepend following args: @@ -367,9 +552,10 @@ check.dependsOn ktlint task ktlintFormat(type: JavaExec, group: "formatting") { description = "Fix Kotlin code style deviations." - main = "com.pinterest.ktlint.Main" + mainClass = "com.pinterest.ktlint.Main" classpath = configurations.ktlint args "-F", "src/**/*.kt" + jvmArgs "--add-opens=java.base/java.lang=ALL-UNNAMED" } compileKotlin { kotlinOptions.freeCompilerArgs = ['-Xjsr305=strict'] } diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 943f0cbf..d64cd491 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 50832291..e6aba251 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 65dcd68d..1aa94a42 100755 --- a/gradlew +++ b/gradlew @@ -83,10 +83,8 @@ done # This is normally unused # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -133,10 +131,13 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! 
+    if ! command -v java >/dev/null 2>&1
+    then
+        die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
 
 Please set the JAVA_HOME variable in your environment to match the
 location of your Java installation."
+    fi
 fi
 
 # Increase the maximum file descriptors if we can.
@@ -144,7 +145,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
     case $MAX_FD in #(
       max*)
         # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
-        # shellcheck disable=SC3045
+        # shellcheck disable=SC2039,SC3045
         MAX_FD=$( ulimit -H -n ) ||
             warn "Could not query maximum file descriptor limit"
     esac
@@ -152,7 +153,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
       '' | soft) :;; #(
       *)
         # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
-        # shellcheck disable=SC3045
+        # shellcheck disable=SC2039,SC3045
         ulimit -n "$MAX_FD" ||
             warn "Could not set maximum file descriptor limit to $MAX_FD"
     esac
@@ -197,11 +198,15 @@ if "$cygwin" || "$msys" ; then
         done
     fi
 
-# Collect all arguments for the java command;
-#   * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
-#     shell script including quotes and variable substitutions, so put them in
-#     double quotes to make sure that they get re-expanded; and
-#   * put everything else in single quotes, so that it's not re-expanded.
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Collect all arguments for the java command:
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
+#     and any embedded shellness will be escaped.
+#   * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
+#     treated as '${Hostname}' itself on the command line.
 
 set -- \
         "-Dorg.gradle.appname=$APP_BASE_NAME" \
diff --git a/src/test/kotlin/org/opensearch/integTest/bwc/ReportsSchedulerBackwardsCompatibilityIT.kt b/src/test/kotlin/org/opensearch/integTest/bwc/ReportsSchedulerBackwardsCompatibilityIT.kt
index 875ab509..58c3057d 100644
--- a/src/test/kotlin/org/opensearch/integTest/bwc/ReportsSchedulerBackwardsCompatibilityIT.kt
+++ b/src/test/kotlin/org/opensearch/integTest/bwc/ReportsSchedulerBackwardsCompatibilityIT.kt
@@ -50,8 +50,8 @@ class ReportsSchedulerBackwardsCompatibilityIT : PluginRestTestCase() {
         val pluginNames = plugins.map { plugin -> plugin["name"] }.toSet()
         when (CLUSTER_TYPE) {
             ClusterType.OLD -> {
-                assertTrue(pluginNames.contains("opendistro-reports-scheduler"))
-                assertTrue(pluginNames.contains("opendistro-job-scheduler"))
+                assertTrue(pluginNames.contains("opensearch-reports-scheduler"))
+                assertTrue(pluginNames.contains("opensearch-job-scheduler"))
                 createBasicReportDefinition()
             }
             ClusterType.MIXED -> {
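
Note (not part of the diff): the BWC Gradle tasks above communicate with the test class purely through system properties such as 'tests.rest.bwcsuite', 'tests.rest.bwcsuite_round', and 'tests.plugin_bwc_version'. Below is a minimal Kotlin sketch of how a test like ReportsSchedulerBackwardsCompatibilityIT can map 'tests.rest.bwcsuite' onto the CLUSTER_TYPE value used in the last hunk; the enum and parse() helper here are illustrative assumptions, not necessarily the repository's exact code.

// Illustrative sketch only: maps the system properties set by the bwc Gradle tasks
// (old_cluster / mixed_cluster / upgraded_cluster) onto a cluster-type enum.
enum class ClusterType {
    OLD, MIXED, UPGRADED;

    companion object {
        fun parse(value: String): ClusterType = when (value) {
            "old_cluster" -> OLD
            "mixed_cluster" -> MIXED
            "upgraded_cluster" -> UPGRADED
            else -> throw AssertionError("Unknown cluster type: $value")
        }
    }
}

// Resolved once per test run from the properties passed in by the bwc tasks.
val CLUSTER_TYPE: ClusterType = ClusterType.parse(System.getProperty("tests.rest.bwcsuite"))
val BWC_VERSION: String? = System.getProperty("tests.plugin_bwc_version")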