diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index b294fe97c7e7c..fdb09594a1cda 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -83,6 +83,8 @@ public class VectorScorerBenchmark { RandomVectorScorer luceneDotScorerQuery; RandomVectorScorer nativeDotScorerQuery; + RandomVectorScorer luceneSqrScorerQuery; + RandomVectorScorer nativeSqrScorerQuery; @Setup public void setup() throws IOException { @@ -130,6 +132,8 @@ public void setup() throws IOException { } luceneDotScorerQuery = luceneScorer(values, VectorSimilarityFunction.DOT_PRODUCT, queryVec); nativeDotScorerQuery = factory.getInt7SQVectorScorer(VectorSimilarityFunction.DOT_PRODUCT, values, queryVec).get(); + luceneSqrScorerQuery = luceneScorer(values, VectorSimilarityFunction.EUCLIDEAN, queryVec); + nativeSqrScorerQuery = factory.getInt7SQVectorScorer(VectorSimilarityFunction.EUCLIDEAN, values, queryVec).get(); // sanity var f1 = dotProductLucene(); @@ -157,6 +161,12 @@ public void setup() throws IOException { if (q1 != q2) { throw new AssertionError("query: lucene[" + q1 + "] != " + "native[" + q2 + "]"); } + + var sqr1 = squareDistanceLuceneQuery(); + var sqr2 = squareDistanceNativeQuery(); + if (sqr1 != sqr2) { + throw new AssertionError("query: lucene[" + sqr1 + "] != " + "native[" + sqr2 + "]"); + } } @TearDown @@ -217,6 +227,16 @@ public float squareDistanceScalar() { return 1 / (1f + adjustedDistance); } + @Benchmark + public float squareDistanceLuceneQuery() throws IOException { + return luceneSqrScorerQuery.score(1); + } + + @Benchmark + public float squareDistanceNativeQuery() throws IOException { + return nativeSqrScorerQuery.score(1); + } + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, 
VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java index 28b90714508bd..538008d1dcac1 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java @@ -10,6 +10,7 @@ package org.elasticsearch.gradle.internal.conventions; import org.elasticsearch.gradle.internal.conventions.info.GitInfo; +import org.elasticsearch.gradle.internal.conventions.info.GitInfoValueSource; import org.elasticsearch.gradle.internal.conventions.util.Util; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -18,38 +19,28 @@ import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; -import javax.inject.Inject; import java.io.File; -class GitInfoPlugin implements Plugin { +import javax.inject.Inject; - private ProviderFactory factory; - private ObjectFactory objectFactory; +public abstract class GitInfoPlugin implements Plugin { + private ProviderFactory factory; private Provider revision; - private Property gitInfo; @Inject - GitInfoPlugin(ProviderFactory factory, ObjectFactory objectFactory) { + public GitInfoPlugin(ProviderFactory factory) { this.factory = factory; - this.objectFactory = objectFactory; } @Override public void apply(Project project) { File rootDir = Util.locateElasticsearchWorkspace(project.getGradle()); - gitInfo = objectFactory.property(GitInfo.class).value(factory.provider(() -> - GitInfo.gitInfo(rootDir) - )); - gitInfo.disallowChanges(); - gitInfo.finalizeValueOnRead(); - - revision = gitInfo.map(info -> info.getRevision() == null ? 
info.getRevision() : "main"); + getGitInfo().convention(factory.of(GitInfoValueSource.class, spec -> { spec.getParameters().getPath().set(rootDir); })); + revision = getGitInfo().map(info -> info.getRevision() == null ? info.getRevision() : "main"); } - public Property getGitInfo() { - return gitInfo; - } + public abstract Property getGitInfo(); public Provider getRevision() { return revision; diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/LicensingPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/LicensingPlugin.java index ba170d083c886..63514ae671bf3 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/LicensingPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/LicensingPlugin.java @@ -15,9 +15,10 @@ import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; -import javax.inject.Inject; import java.util.Map; +import javax.inject.Inject; + public class LicensingPlugin implements Plugin { static final String ELASTIC_LICENSE_URL_PREFIX = "https://raw.githubusercontent.com/elastic/elasticsearch/"; static final String ELASTIC_LICENSE_URL_POSTFIX = "/licenses/ELASTIC-LICENSE-2.0.txt"; @@ -33,24 +34,33 @@ public LicensingPlugin(ProviderFactory providerFactory) { @Override public void apply(Project project) { Provider revision = project.getRootProject().getPlugins().apply(GitInfoPlugin.class).getRevision(); - Provider licenseCommitProvider = providerFactory.provider(() -> - isSnapshotVersion(project) ? revision.get() : "v" + project.getVersion() + Provider licenseCommitProvider = providerFactory.provider( + () -> isSnapshotVersion(project) ? 
revision.get() : "v" + project.getVersion() ); - Provider elasticLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX + - licenseCommit + ELASTIC_LICENSE_URL_POSTFIX); - Provider agplLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX + - licenseCommit + AGPL_ELASTIC_LICENSE_URL_POSTFIX); + Provider elasticLicenseURL = licenseCommitProvider.map( + licenseCommit -> ELASTIC_LICENSE_URL_PREFIX + licenseCommit + ELASTIC_LICENSE_URL_POSTFIX + ); + Provider agplLicenseURL = licenseCommitProvider.map( + licenseCommit -> ELASTIC_LICENSE_URL_PREFIX + licenseCommit + AGPL_ELASTIC_LICENSE_URL_POSTFIX + ); // But stick the Elastic license url in project.ext so we can get it if we need to switch to it project.getExtensions().getExtraProperties().set("elasticLicenseUrl", elasticLicenseURL); - MapProperty licensesProperty = project.getObjects().mapProperty(String.class, String.class).convention( - providerFactory.provider(() -> Map.of( - "Server Side Public License, v 1", "https://www.mongodb.com/licensing/server-side-public-license", - "Elastic License 2.0", elasticLicenseURL.get(), - "GNU Affero General Public License Version 3", agplLicenseURL.get()) + MapProperty> licensesProperty = project.getObjects() + .mapProperty(String.class, (Class>) (Class) Provider.class) + .convention( + providerFactory.provider( + () -> Map.of( + "Server Side Public License, v 1", + providerFactory.provider(() -> "https://www.mongodb.com/licensing/server-side-public-license"), + "Elastic License 2.0", + elasticLicenseURL, + "GNU Affero General Public License Version 3", + agplLicenseURL + ) ) - ); + ); // Default to the SSPL+Elastic dual license project.getExtensions().getExtraProperties().set("projectLicenses", licensesProperty); diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java 
index c3124812e5089..22b0ab1918024 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java @@ -28,6 +28,7 @@ import org.gradle.api.plugins.JavaLibraryPlugin; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.provider.MapProperty; +import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.publish.PublishingExtension; import org.gradle.api.publish.maven.MavenPublication; @@ -42,6 +43,7 @@ import java.io.File; import java.util.Map; import java.util.concurrent.Callable; + import javax.inject.Inject; public class PublishPlugin implements Plugin { @@ -81,7 +83,7 @@ private void configurePublications(Project project) { } }); @SuppressWarnings("unchecked") - var projectLicenses = (MapProperty) project.getExtensions().getExtraProperties().get("projectLicenses"); + var projectLicenses = (MapProperty>) project.getExtensions().getExtraProperties().get("projectLicenses"); publication.getPom().withXml(xml -> { var node = xml.asNode(); node.appendNode("inceptionYear", "2009"); @@ -89,7 +91,7 @@ private void configurePublications(Project project) { projectLicenses.get().entrySet().stream().sorted(Map.Entry.comparingByKey()).forEach(entry -> { Node license = licensesNode.appendNode("license"); license.appendNode("name", entry.getKey()); - license.appendNode("url", entry.getValue()); + license.appendNode("url", entry.getValue().get()); license.appendNode("distribution", "repo"); }); var developer = node.appendNode("developers").appendNode("developer"); @@ -194,7 +196,6 @@ static void configureSourcesJar(Project project) { }); } - /** * Format the generated pom files to be in a sort of reproducible order. 
*/ diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfo.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfo.java index dbd3b3f9c48ad..e6a41093205cc 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfo.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfo.java @@ -22,6 +22,7 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import java.util.Objects; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -190,4 +191,15 @@ public String urlFromOrigin() { } } + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + GitInfo gitInfo = (GitInfo) o; + return Objects.equals(revision, gitInfo.revision) && Objects.equals(origin, gitInfo.origin); + } + + @Override + public int hashCode() { + return Objects.hash(revision, origin); + } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfoValueSource.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfoValueSource.java new file mode 100644 index 0000000000000..c422c2eb74e39 --- /dev/null +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/GitInfoValueSource.java @@ -0,0 +1,22 @@ +package org.elasticsearch.gradle.internal.conventions.info; + +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.jetbrains.annotations.Nullable; + +import java.io.File; + +public abstract class GitInfoValueSource implements ValueSource { + + @Nullable + @Override + public GitInfo obtain() { + File path = getParameters().getPath().get(); + return GitInfo.gitInfo(path); + } + + public interface 
Parameters extends ValueSourceParameters { + Property getPath(); + } +} diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy index 65f124e5f88e8..18b681ef5a9ef 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy @@ -45,7 +45,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { file("build/distributions/hello-world-1.0-javadoc.jar").exists() file("build/distributions/hello-world-1.0-sources.jar").exists() file("build/distributions/hello-world-1.0.pom").exists() - assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """ + assertXmlEquals( + file("build/distributions/hello-world-1.0.pom").text, """ @@ -130,7 +131,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { file("build/distributions/hello-world-1.0-javadoc.jar").exists() file("build/distributions/hello-world-1.0-sources.jar").exists() file("build/distributions/hello-world-1.0.pom").exists() - assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """ + assertXmlEquals( + file("build/distributions/hello-world-1.0.pom").text, """ 4.0.0 org.acme @@ -219,7 +221,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { file("build/distributions/hello-world-1.0-javadoc.jar").exists() file("build/distributions/hello-world-1.0-sources.jar").exists() file("build/distributions/hello-world-1.0.pom").exists() - assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """ + assertXmlEquals( + file("build/distributions/hello-world-1.0.pom").text, """ 4.0.0 org.acme @@ -312,7 +315,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { 
file("build/distributions/hello-world-plugin-1.0-javadoc.jar").exists() file("build/distributions/hello-world-plugin-1.0-sources.jar").exists() file("build/distributions/hello-world-plugin-1.0.pom").exists() - assertXmlEquals(file("build/distributions/hello-world-plugin-1.0.pom").text, """ + assertXmlEquals( + file("build/distributions/hello-world-plugin-1.0.pom").text, """ @@ -389,7 +393,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { then: result.task(":generatePom").outcome == TaskOutcome.SUCCESS file("build/distributions/hello-world-plugin-2.0.pom").exists() - assertXmlEquals(file("build/distributions/hello-world-plugin-2.0.pom").text, """ + assertXmlEquals( + file("build/distributions/hello-world-plugin-2.0.pom").text, """ @@ -439,7 +444,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { // scm info only added for internal builds internalBuild() buildFile << """ - buildParams.setGitOrigin("https://some-repo.com/repo.git") + buildParams.setGitOrigin(project.providers.provider(() -> "https://some-repo.com/repo.git")) apply plugin:'elasticsearch.java' apply plugin:'elasticsearch.publish' @@ -447,7 +452,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { group = 'org.acme' description = "just a test project" - ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) + ext.projectLicenses.set(['The Apache Software License, Version 2.0': project.providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) """ when: @@ -456,7 +461,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { then: result.task(":generatePom").outcome == TaskOutcome.SUCCESS file("build/distributions/hello-world-1.0.pom").exists() - assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """ + assertXmlEquals( + file("build/distributions/hello-world-1.0.pom").text, """ @@ -493,15 +499,15 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { 
private boolean assertXmlEquals(String toTest, String expected) { def diff = DiffBuilder.compare(Input.fromString(expected)) - .ignoreWhitespace() - .ignoreComments() - .normalizeWhitespace() - .withTest(Input.fromString(toTest)) - .build() + .ignoreWhitespace() + .ignoreComments() + .normalizeWhitespace() + .withTest(Input.fromString(toTest)) + .build() diff.differences.each { difference -> println difference } - if(diff.differences.size() > 0) { + if (diff.differences.size() > 0) { println """ given: $toTest """ diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index eb3a529498fa7..60ae4d58f343e 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -166,7 +166,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { tasks.register('buildDependencyArtifacts') { group = 'ide' description = 'Builds artifacts needed as dependency for IDE modules' - dependsOn([':plugins:repository-hdfs:hadoop-client-api:shadowJar', + dependsOn([':plugins:repository-hdfs:hadoop-client-api:jar', ':x-pack:plugin:esql:compute:ann:jar', ':x-pack:plugin:esql:compute:gen:jar', ':server:generateModulesList', diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java index 3ab85ba69dc80..a4477d049460c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java @@ -27,6 +27,7 @@ import org.gradle.api.plugins.JavaLibraryPlugin; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.provider.Property; +import org.gradle.api.provider.Provider; import org.gradle.api.tasks.TaskProvider; import 
org.gradle.api.tasks.bundling.Jar; import org.gradle.api.tasks.javadoc.Javadoc; @@ -120,12 +121,12 @@ public void execute(Task task) { } private static void configureJarManifest(Project project, BuildParameterExtension buildParams) { - String gitOrigin = buildParams.getGitOrigin(); - String gitRevision = buildParams.getGitRevision(); + Provider gitOrigin = buildParams.getGitOrigin(); + Provider gitRevision = buildParams.getGitRevision(); project.getPlugins().withType(InfoBrokerPlugin.class).whenPluginAdded(manifestPlugin -> { - manifestPlugin.add("Module-Origin", toStringable(() -> gitOrigin)); - manifestPlugin.add("Change", toStringable(() -> gitRevision)); + manifestPlugin.add("Module-Origin", toStringable(() -> gitOrigin.get())); + manifestPlugin.add("Change", toStringable(() -> gitRevision.get())); manifestPlugin.add("X-Compile-Elasticsearch-Version", toStringable(VersionProperties::getElasticsearch)); manifestPlugin.add("X-Compile-Lucene-Version", toStringable(VersionProperties::getLucene)); manifestPlugin.add( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java index e80dc6ef1b44c..ef9055b3728d3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java @@ -46,9 +46,9 @@ public interface BuildParameterExtension { Provider getRuntimeJavaDetails(); - String getGitRevision(); + Provider getGitRevision(); - String getGitOrigin(); + Provider getGitOrigin(); ZonedDateTime getBuildDate(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java index 
faac406d974c6..283c02428e4e6 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java @@ -36,7 +36,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx private final Provider runtimeJavaVersion; private final Provider> javaToolChainSpec; private final Provider runtimeJavaDetails; - private final String gitRevision; + private final Provider gitRevision; private transient AtomicReference buildDate = new AtomicReference<>(); private final String testSeed; @@ -46,7 +46,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx // not final for testing private Provider bwcVersions; - private String gitOrigin; + private Provider gitOrigin; public DefaultBuildParameterExtension( ProviderFactory providers, @@ -59,8 +59,8 @@ public DefaultBuildParameterExtension( JavaVersion minimumCompilerVersion, JavaVersion minimumRuntimeVersion, JavaVersion gradleJavaVersion, - String gitRevision, - String gitOrigin, + Provider gitRevision, + Provider gitOrigin, String testSeed, boolean isCi, int defaultParallel, @@ -155,12 +155,12 @@ public Provider getRuntimeJavaDetails() { } @Override - public String getGitRevision() { + public Provider getGitRevision() { return gitRevision; } @Override - public String getGitOrigin() { + public Provider getGitOrigin() { return gitOrigin; } @@ -239,7 +239,7 @@ public void setBwcVersions(Provider bwcVersions) { } // for testing; not part of public api - public void setGitOrigin(String gitOrigin) { + public void setGitOrigin(Provider gitOrigin) { this.gitOrigin = gitOrigin; } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 
86f59aa0ab41e..675f1198b2a7d 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -14,6 +14,7 @@ import org.apache.commons.io.IOUtils; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.BwcVersions; +import org.elasticsearch.gradle.internal.conventions.GitInfoPlugin; import org.elasticsearch.gradle.internal.conventions.info.GitInfo; import org.elasticsearch.gradle.internal.conventions.info.ParallelDetector; import org.elasticsearch.gradle.internal.conventions.util.Util; @@ -96,6 +97,8 @@ public void apply(Project project) { } this.project = project; project.getPlugins().apply(JvmToolchainsPlugin.class); + Provider gitInfo = project.getPlugins().apply(GitInfoPlugin.class).getGitInfo(); + toolChainService = project.getExtensions().getByType(JavaToolchainService.class); GradleVersion minimumGradleVersion = GradleVersion.version(getResourceContents("/minimumGradleVersion")); if (GradleVersion.current().compareTo(minimumGradleVersion) < 0) { @@ -111,8 +114,6 @@ public void apply(Project project) { ? 
explicitRuntimeJavaHome : resolveJavaHomeFromToolChainService(VersionProperties.getBundledJdkMajorVersion()); - GitInfo gitInfo = GitInfo.gitInfo(project.getRootDir()); - Provider runtimeJdkMetaData = actualRuntimeJavaHome.map( runtimeJavaHome -> metadataDetector.getMetadata(getJavaInstallation(runtimeJavaHome)) ); @@ -143,8 +144,8 @@ public void apply(Project project) { minimumCompilerVersion, minimumRuntimeVersion, Jvm.current().getJavaVersion(), - gitInfo.getRevision(), - gitInfo.getOrigin(), + gitInfo.map(g -> g.getRevision()), + gitInfo.map(g -> g.getOrigin()), getTestSeed(), System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null, ParallelDetector.findDefaultParallel(project), diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy index 343268b9b4d47..ce63069a873ab 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy @@ -9,9 +9,9 @@ package org.elasticsearch.gradle.internal.info +import spock.lang.Ignore import spock.lang.Specification -import org.elasticsearch.gradle.internal.BwcVersions import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.provider.Provider @@ -31,6 +31,7 @@ class BuildParameterExtensionSpec extends Specification { ProjectBuilder projectBuilder = new ProjectBuilder() + @Ignore def "#getterName is cached anc concurrently accessible"() { given: def project = projectBuilder.build() @@ -85,8 +86,8 @@ class BuildParameterExtensionSpec extends Specification { JavaVersion.VERSION_11, JavaVersion.VERSION_11, JavaVersion.VERSION_11, - "gitRevision", - "gitOrigin", + providerMock(), + 
providerMock(), "testSeed", false, 5, diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 003c251186510..3fb2aa6595869 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -36,7 +36,7 @@ base { } // LLRC is licenses under Apache 2.0 -projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) +projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) dependencies { diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index f6f26c8f7c0d5..9b1cb1140311b 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -32,7 +32,7 @@ base { } // rest client sniffer is licenses under Apache 2.0 -projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) +projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) dependencies { diff --git a/client/test/build.gradle b/client/test/build.gradle index 8de6b3dbf92be..e39b7587b69d5 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -18,7 +18,7 @@ java { group = "${group}.client.test" // rest client sniffer is licenses under Apache 2.0 -projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) +projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) dependencies { diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index 6cb030565d9d2..48881660b30fe 100644 --- 
a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -22,7 +22,7 @@ <% if (docker_base == 'iron_bank') { %> ARG BASE_REGISTRY=registry1.dso.mil ARG BASE_IMAGE=ironbank/redhat/ubi/ubi9 -ARG BASE_TAG=9.4 +ARG BASE_TAG=9.5 <% } %> ################################################################################ diff --git a/docs/changelog/116868.yaml b/docs/changelog/116868.yaml new file mode 100644 index 0000000000000..4ca4d23306462 --- /dev/null +++ b/docs/changelog/116868.yaml @@ -0,0 +1,5 @@ +pr: 116868 +summary: Run `TransportGetComponentTemplateAction` on local node +area: Indices APIs +type: enhancement +issues: [] diff --git a/docs/changelog/117214.yaml b/docs/changelog/117214.yaml new file mode 100644 index 0000000000000..ba74197eb7634 --- /dev/null +++ b/docs/changelog/117214.yaml @@ -0,0 +1,5 @@ +pr: 117214 +summary: Returning ignored fields in the simulate ingest API +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/118999.yaml b/docs/changelog/118999.yaml new file mode 100644 index 0000000000000..0188cebbd7685 --- /dev/null +++ b/docs/changelog/118999.yaml @@ -0,0 +1,6 @@ +pr: 118999 +summary: Fix loss of context in the inference API for streaming APIs +area: Machine Learning +type: bug +issues: + - 119000 diff --git a/docs/changelog/119134.yaml b/docs/changelog/119134.yaml new file mode 100644 index 0000000000000..c4aefac91c701 --- /dev/null +++ b/docs/changelog/119134.yaml @@ -0,0 +1,6 @@ +pr: 119134 +summary: Handle `index.mapping.ignore_malformed` in downsampling +area: Downsampling +type: bug +issues: + - 119075 diff --git a/docs/reference/indices/get-component-template.asciidoc b/docs/reference/indices/get-component-template.asciidoc index f35192ca448db..7e16457730f42 100644 --- a/docs/reference/indices/get-component-template.asciidoc +++ b/docs/reference/indices/get-component-template.asciidoc @@ -67,7 +67,7 @@ Wildcard (`*`) expressions are supported. 
include::{docdir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{docdir}/rest-api/common-parms.asciidoc[tag=local] +include::{docdir}/rest-api/common-parms.asciidoc[tag=local-deprecated-9.0.0] include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 993bb8cb894f9..83c11c9256a67 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -668,6 +668,16 @@ node only. Defaults to `false`, which means information is retrieved from the master node. end::local[] +tag::local-deprecated-9.0.0[] +`local`:: +(Optional, Boolean) If `true`, the request retrieves information from the local +node only. Defaults to `false`, which means information is retrieved from +the master node. ++ +deprecated::[9.0.0, "The `?local` query parameter to this API has no effect, is now deprecated, and will be removed in a future version."] + +end::local-deprecated-9.0.0[] + tag::mappings[] `mappings`:: + diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index b57d2aee9d190..b284e1eb00e7e 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -207,16 +207,8 @@ GET /_xpack/usage "inference": { "available" : true, "enabled" : true, - "models" : [{ - "service": "elasticsearch", - "task_type": "SPARSE_EMBEDDING", - "count": 1 - }, - { - "service": "elasticsearch", - "task_type": "TEXT_EMBEDDING", - "count": 1 - }, + "models" : [ + ... 
] }, "logstash" : { @@ -523,7 +515,10 @@ GET /_xpack/usage "available": true, "enabled": false, "indices_count": 0, - "indices_with_synthetic_source": 0 + "indices_with_synthetic_source": 0, + "num_docs": 0, + "size_in_bytes": 0, + "has_custom_cutoff_date": false } } ------------------------------------------------------------ @@ -535,6 +530,7 @@ GET /_xpack/usage // TESTRESPONSE[s/"policy_stats" : \[[^\]]*\]/"policy_stats" : $body.$_path/] // TESTRESPONSE[s/"slm" : \{[^\}]*\},/"slm" : $body.$_path,/] // TESTRESPONSE[s/"health_api" : \{[^\}]*\}\s*\}/"health_api" : $body.$_path/] +// TESTRESPONSE[s/"models" : \[[^\]]*\]/"models" : $body.$_path/] // TESTRESPONSE[s/"data_streams" : \{[^\}]*\},/"data_streams" : $body.$_path,/] // TESTRESPONSE[s/ : true/ : $body.$_path/] // TESTRESPONSE[s/ : false/ : $body.$_path/] @@ -551,4 +547,5 @@ GET /_xpack/usage // 5. All of the numbers and strings on the right hand side of *every* field in // the response are ignored. So we're really only asserting things about the // the shape of this response, not the values in it. -// 6. Ignore the contents of data streams until the failure store is tech preview. +// 6. Ignore the contents of the `inference.models` array because the models might not yet have been initialized +// 7. Ignore the contents of data streams until the failure store is tech preview. 
diff --git a/libs/h3/build.gradle b/libs/h3/build.gradle index 81a0d56ed4606..6036323e160fc 100644 --- a/libs/h3/build.gradle +++ b/libs/h3/build.gradle @@ -35,7 +35,7 @@ tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' } -ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) +ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) tasks.withType(LicenseHeadersTask.class).configureEach { diff --git a/libs/tdigest/build.gradle b/libs/tdigest/build.gradle index 2713df701fb44..b79a6ce0a486a 100644 --- a/libs/tdigest/build.gradle +++ b/libs/tdigest/build.gradle @@ -36,7 +36,7 @@ tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' } -ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) +ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) tasks.withType(LicenseHeadersTask.class).configureEach { diff --git a/muted-tests.yml b/muted-tests.yml index b86aad0d091d6..1b559baac3f77 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -64,9 +64,6 @@ tests: - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformSpecificVariant issue: https://github.com/elastic/elasticsearch/issues/113950 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/rest-api/usage/line_38} - issue: https://github.com/elastic/elasticsearch/issues/113694 - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT method: testTracingCrossCluster issue: 
https://github.com/elastic/elasticsearch/issues/112731 @@ -233,8 +230,6 @@ tests: - class: org.elasticsearch.packaging.test.ArchiveTests method: test51AutoConfigurationWithPasswordProtectedKeystore issue: https://github.com/elastic/elasticsearch/issues/118212 -- class: org.elasticsearch.ingest.common.IngestCommonClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/118215 - class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT method: test {p0=data_stream/120_data_streams_stats/Multiple data stream} issue: https://github.com/elastic/elasticsearch/issues/118217 diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 4da7c24de80f1..dea1e1bdd273f 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -28,7 +28,7 @@ configurations { } dependencies { - api project(path: 'hadoop-client-api', configuration: 'shadow') + api project(path: 'hadoop-client-api', configuration: 'default') if (isEclipse) { /* * Eclipse can't pick up the shadow dependency so we point it at *something* diff --git a/plugins/repository-hdfs/hadoop-client-api/build.gradle b/plugins/repository-hdfs/hadoop-client-api/build.gradle index 24e4213780fe2..5e87b81292501 100644 --- a/plugins/repository-hdfs/hadoop-client-api/build.gradle +++ b/plugins/repository-hdfs/hadoop-client-api/build.gradle @@ -1,16 +1,46 @@ -apply plugin: 'elasticsearch.build' -apply plugin: 'com.gradleup.shadow' +apply plugin: 'elasticsearch.java' + +sourceSets { + patcher +} + +configurations { + thejar { + canBeResolved = true + } +} dependencies { - implementation "org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}" + thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") { + transitive = false + } + + patcherImplementation 'org.ow2.asm:asm:9.7.1' + patcherImplementation 'org.ow2.asm:asm-tree:9.7.1' } -tasks.named('shadowJar').configure { - exclude 
'org/apache/hadoop/util/ShutdownHookManager$*.class' +def outputDir = layout.buildDirectory.dir("patched-classes") + +def patchTask = tasks.register("patchClasses", JavaExec) { + inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE) + inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE) + outputs.dir(outputDir) + classpath = sourceSets.patcher.runtimeClasspath + mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher' + doFirst { + args(configurations.thejar.singleFile, outputDir.get().asFile) + } } -['jarHell', 'thirdPartyAudit', 'forbiddenApisMain', 'splitPackagesAudit'].each { - tasks.named(it).configure { - enabled = false +tasks.named('jar').configure { + dependsOn(configurations.thejar) + + from(patchTask) + from({ project.zipTree(configurations.thejar.singleFile) }) { + eachFile { + if (outputDir.get().file(it.relativePath.pathString).asFile.exists()) { + it.exclude() + } + } } } diff --git a/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java deleted file mode 100644 index c3d15dc06e7c1..0000000000000 --- a/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.apache.hadoop.util; - -import java.util.concurrent.TimeUnit; - -/** - * A replacement for the ShutdownHookManager from hadoop. - * - * This class does not actually add a shutdown hook. Hadoop's shutdown hook - * manager does not fail gracefully when it lacks security manager permissions - * to add shutdown hooks. This implements the same api as the hadoop class, but - * with no-ops. - */ -public class ShutdownHookManager { - private static final ShutdownHookManager MGR = new ShutdownHookManager(); - - public static ShutdownHookManager get() { - return MGR; - } - - private ShutdownHookManager() {} - - public void addShutdownHook(Runnable shutdownHook, int priority) {} - - public void addShutdownHook(Runnable shutdownHook, int priority, long timeout, TimeUnit unit) {} - - public boolean removeShutdownHook(Runnable shutdownHook) { - return false; - } - - public boolean hasShutdownHook(Runnable shutdownHook) { - return false; - } - - public boolean isShutdownInProgress() { - return false; - } - - public void clearShutdownHooks() {} -} diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java new file mode 100644 index 0000000000000..6636b39445964 --- /dev/null +++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.hdfs.patch; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.ClassWriter; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Map; +import java.util.function.Function; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +public class HdfsClassPatcher { + static final Map> patchers = Map.of( + "org/apache/hadoop/util/ShutdownHookManager.class", + ShutdownHookManagerPatcher::new, + "org/apache/hadoop/util/Shell.class", + ShellPatcher::new + ); + + public static void main(String[] args) throws Exception { + String jarPath = args[0]; + Path outputDir = Paths.get(args[1]); + + try (JarFile jarFile = new JarFile(new File(jarPath))) { + for (var patcher : patchers.entrySet()) { + JarEntry jarEntry = jarFile.getJarEntry(patcher.getKey()); + if (jarEntry == null) { + throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]"); + } + byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes(); + + ClassReader classReader = new ClassReader(classToPatch); + ClassWriter classWriter = new ClassWriter(classReader, 0); + classReader.accept(patcher.getValue().apply(classWriter), 0); + + Path outputFile = outputDir.resolve(patcher.getKey()); + Files.createDirectories(outputFile.getParent()); + Files.write(outputFile, classWriter.toByteArray()); + } + } + } +} diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java new file mode 100644 index 0000000000000..e07a32cc294a5 --- /dev/null +++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.hdfs.patch; + +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; + +public class MethodReplacement extends MethodVisitor { + private final MethodVisitor delegate; + private final Runnable bodyWriter; + + MethodReplacement(MethodVisitor delegate, Runnable bodyWriter) { + super(Opcodes.ASM9); + this.delegate = delegate; + this.bodyWriter = bodyWriter; + } + + @Override + public void visitCode() { + // delegate.visitCode(); + bodyWriter.run(); + // delegate.visitEnd(); + } + + @Override + public void visitMaxs(int maxStack, int maxLocals) { + delegate.visitMaxs(maxStack, maxLocals); + } +} diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java new file mode 100644 index 0000000000000..397b63e434ba2 --- /dev/null +++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.hdfs.patch; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; + +class ShellPatcher extends ClassVisitor { + + ShellPatcher(ClassWriter classWriter) { + super(Opcodes.ASM9, classWriter); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions); + if (name.equals("isSetsidSupported")) { + return new MethodReplacement(mv, () -> { + mv.visitInsn(Opcodes.ICONST_0); + mv.visitInsn(Opcodes.IRETURN); + }); + } + return mv; + } +} diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java new file mode 100644 index 0000000000000..1235b5af9002f --- /dev/null +++ b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.hdfs.patch; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; + +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; + +class ShutdownHookManagerPatcher extends ClassVisitor { + private static final String CLASSNAME = "org/apache/hadoop/util/ShutdownHookManager"; + private static final Set VOID_METHODS = Set.of("addShutdownHook", "clearShutdownHooks"); + private static final Set BOOLEAN_METHODS = Set.of("removeShutdownHook", "hasShutdownHook", "isShutdownInProgress"); + + ShutdownHookManagerPatcher(ClassWriter classWriter) { + super(Opcodes.ASM9, classWriter); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions); + if (VOID_METHODS.contains(name)) { + // make void methods noops + return new MethodReplacement(mv, () -> { mv.visitInsn(Opcodes.RETURN); }); + } else if (BOOLEAN_METHODS.contains(name)) { + // make boolean methods always return false + return new MethodReplacement(mv, () -> { + mv.visitInsn(Opcodes.ICONST_0); + mv.visitInsn(Opcodes.IRETURN); + }); + } else if (name.equals("")) { + return new MethodReplacement(mv, () -> { + // just initialize the statics, don't actually get runtime to add shutdown hook + + var classType = Type.getObjectType(CLASSNAME); + mv.visitTypeInsn(Opcodes.NEW, CLASSNAME); + mv.visitInsn(Opcodes.DUP); + mv.visitMethodInsn(Opcodes.INVOKESPECIAL, CLASSNAME, "", "()V", false); + mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "MGR", classType.getDescriptor()); + + var timeUnitType = Type.getType(TimeUnit.class); + mv.visitFieldInsn(Opcodes.GETSTATIC, timeUnitType.getInternalName(), "SECONDS", timeUnitType.getDescriptor()); + 
mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "TIME_UNIT_DEFAULT", timeUnitType.getDescriptor()); + + var executorServiceType = Type.getType(ExecutorService.class); + mv.visitInsn(Opcodes.ACONST_NULL); + mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "EXECUTOR", executorServiceType.getDescriptor()); + + mv.visitInsn(Opcodes.RETURN); + }); + } + return mv; + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index a1b3dcc37c45b..c6f4588fb4cd2 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -12,7 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.HttpHost; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Build; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Request; @@ -40,7 +39,6 @@ * In 8.2 we also added the ability to filter fields by type and metadata, with some post-hoc filtering applied on * the co-ordinating node if older nodes were included in the system */ -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103473") public class FieldCapsIT extends AbstractRollingUpgradeTestCase { public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) { diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java index d6254d091a868..c47eeef6101ff 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/RestActionCancellationIT.java @@ -15,6 +15,7 @@ import 
org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; +import org.elasticsearch.action.admin.indices.template.get.GetComponentTemplateAction; import org.elasticsearch.action.support.CancellableActionTestPlugin; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.RefCountingListener; @@ -66,6 +67,10 @@ public void testCatAliasesCancellation() { runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_cat/aliases"), GetAliasesAction.NAME); } + public void testGetComponentTemplateCancellation() { + runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_component_template"), GetComponentTemplateAction.NAME); + } + private void runRestActionCancellationTest(Request request, String actionName) { final var node = usually() ? internalCluster().getRandomNodeName() : internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 2d3fa6b568381..d4843fb152888 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1720,3 +1720,59 @@ setup: - match: { docs.0.doc._source.foo: 3 } - match: { docs.0.doc._source.bar: "some text value" } - not_exists: docs.0.doc.error + +--- +"Test ignored_fields": + - skip: + features: + - headers + - allowed_warnings + + - requires: + cluster_features: ["simulate.ignored.fields"] + reason: "ingest simulate ignored fields added in 8.18" + + - do: + headers: + Content-Type: 
application/json + simulate.ingest: + index: nonexistent + body: > + { + "docs": [ + { + "_index": "simulate-test", + "_id": "y9Es_JIBiw6_GgN-U0qy", + "_score": 1, + "_source": { + "abc": "sfdsfsfdsfsfdsfsfdsfsfdsfsfdsf" + } + } + ], + "index_template_substitutions": { + "ind_temp": { + "index_patterns": ["simulate-test"], + "composed_of": ["simulate-test"] + } + }, + "component_template_substitutions": { + "simulate-test": { + "template": { + "mappings": { + "dynamic": false, + "properties": { + "abc": { + "type": "keyword", + "ignore_above": 1 + } + } + } + } + } + } + } + - length: { docs: 1 } + - match: { docs.0.doc._index: "simulate-test" } + - match: { docs.0.doc._source.abc: "sfdsfsfdsfsfdsfsfdsfsfdsfsfdsf" } + - match: { docs.0.doc.ignored_fields: [ {"field": "abc"} ] } + - not_exists: docs.0.doc.error diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 133b756173306..365c6b8cf19bb 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -13,7 +13,7 @@ restResources { } // REST API specifications are published under the Apache 2.0 License -ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0']) +ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) configurations { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json index 818d034ca8158..b2503659329a3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.exists_component_template.json @@ -28,9 +28,10 @@ "params":{ "master_timeout":{ "type":"time", - "description":"Explicit operation timeout for 
connection to master node" + "description":"Timeout for waiting for new cluster state in case it is blocked" }, "local":{ + "deprecated":true, "type":"boolean", "description":"Return local information, do not retrieve the state from master node (default: false)" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json index 889e1b817b0fe..def0cc5fb8bb0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_component_template.json @@ -34,9 +34,10 @@ "params":{ "master_timeout":{ "type":"time", - "description":"Explicit operation timeout for connection to master node" + "description":"Timeout for waiting for new cluster state in case it is blocked" }, "local":{ + "deprecated":true, "type":"boolean", "description":"Return local information, do not retrieve the state from master node (default: false)" }, diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml index f698d3399f27d..800dec2a795a4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml @@ -171,3 +171,33 @@ - match: {component_templates.0.component_template.template.lifecycle.enabled: true} - match: {component_templates.0.component_template.template.lifecycle.data_retention: "10d"} - is_true: component_templates.0.component_template.template.lifecycle.rollover + +--- +"Deprecated local parameter": + - requires: + capabilities: + - method: GET + path: /_component_template + capabilities: ["local_param_deprecated"] + test_runner_features: 
["capabilities", "warnings"] + reason: Deprecation was implemented with capability + + - do: + cluster.get_component_template: + local: true + warnings: + - "the [?local] query parameter to this API has no effect, is now deprecated, and will be removed in a future version" + +--- +"Deprecated local parameter works in v8 compat mode": + - requires: + test_runner_features: ["headers"] + + - do: + headers: + Content-Type: "application/vnd.elasticsearch+json;compatible-with=8" + Accept: "application/vnd.elasticsearch+json;compatible-with=8" + cluster.get_component_template: + local: true + + - exists: component_templates diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/ComponentTemplatesFileSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/ComponentTemplatesFileSettingsIT.java index 8e0dee2396411..5325a17a7b7d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/ComponentTemplatesFileSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/ComponentTemplatesFileSettingsIT.java @@ -463,7 +463,7 @@ private void assertComponentAndIndexTemplateDelete(CountDownLatch savedClusterSt final var componentResponse = client().execute( GetComponentTemplateAction.INSTANCE, - new GetComponentTemplateAction.Request("other*") + new GetComponentTemplateAction.Request(TEST_REQUEST_TIMEOUT, "other*") ).get(); assertTrue(componentResponse.getComponentTemplates().isEmpty()); diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ab8b66e765e91..3495908da7eeb 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -145,6 +145,7 @@ static TransportVersion def(int id) { public static final TransportVersion 
NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0); public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0); public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_00_0); + public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index f729455edcc24..4f61b89aeaf5e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -14,12 +14,17 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.action.support.local.LocalClusterStateRequest; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -43,22 +48,23 @@ private GetComponentTemplateAction() { /** * Request that to retrieve one or more component templates */ - public static class 
Request extends MasterNodeReadRequest { + public static class Request extends LocalClusterStateRequest { @Nullable private String name; private boolean includeDefaults; - public Request() { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); - } - - public Request(String name) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + public Request(TimeValue masterTimeout, String name) { + super(masterTimeout); this.name = name; this.includeDefaults = false; } + /** + * NB prior to 9.0 get-component was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) public Request(StreamInput in) throws IOException { super(in); name = in.readOptionalString(); @@ -70,17 +76,13 @@ public Request(StreamInput in) throws IOException { } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalString(name); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { - out.writeBoolean(includeDefaults); - } + public ActionRequestValidationException validate() { + return null; } @Override - public ActionRequestValidationException validate() { - return null; + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); } /** @@ -123,19 +125,6 @@ public static class Response extends ActionResponse implements ToXContentObject @Nullable private final RolloverConfiguration rolloverConfiguration; - public Response(StreamInput in) throws IOException { - super(in); - componentTemplates = in.readMap(ComponentTemplate::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { - rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); - } else { - rolloverConfiguration = null; - } - if 
(in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) { - in.readOptionalWriteable(DataStreamGlobalRetention::read); - } - } - /** * Please use {@link GetComponentTemplateAction.Response#Response(Map)} */ @@ -183,6 +172,11 @@ public DataStreamGlobalRetention getGlobalRetention() { return null; } + /** + * NB prior to 9.0 get-component was a TransportMasterNodeReadAction so for BwC we must remain able to write these responses until + * we no longer need to support calling this action remotely. + */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(componentTemplates, StreamOutput::writeWriteable); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java index d04ccd39be04b..40e1a988dc2f2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java @@ -12,51 +12,61 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.action.support.local.TransportLocalClusterStateAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.HashMap; import java.util.Map; -public class TransportGetComponentTemplateAction extends TransportMasterNodeReadAction< +public class TransportGetComponentTemplateAction extends TransportLocalClusterStateAction< GetComponentTemplateAction.Request, GetComponentTemplateAction.Response> { private final ClusterSettings clusterSettings; + /** + * NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until + * we no longer need to support calling this action remotely. 
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT) + @SuppressWarnings("this-escape") @Inject public TransportGetComponentTemplateAction( TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver + ActionFilters actionFilters ) { super( GetComponentTemplateAction.NAME, - transportService, - clusterService, - threadPool, actionFilters, - GetComponentTemplateAction.Request::new, - indexNameExpressionResolver, - GetComponentTemplateAction.Response::new, + transportService.getTaskManager(), + clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE ); clusterSettings = clusterService.getClusterSettings(); + + transportService.registerRequestHandler( + actionName, + executor, + false, + true, + GetComponentTemplateAction.Request::new, + (request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel)) + ); } @Override @@ -65,12 +75,13 @@ protected ClusterBlockException checkBlock(GetComponentTemplateAction.Request re } @Override - protected void masterOperation( + protected void localClusterStateOperation( Task task, GetComponentTemplateAction.Request request, ClusterState state, ActionListener listener ) { + final var cancellableTask = (CancellableTask) task; Map allTemplates = state.metadata().componentTemplates(); Map results; @@ -93,6 +104,7 @@ protected void masterOperation( } } + cancellableTask.ensureNotCancelled(); if (request.includeDefaults()) { listener.onResponse( new GetComponentTemplateAction.Response( diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java index 62a9b88cb6a57..998a3ada5d157 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java @@ -15,6 +15,7 @@ import java.util.Set; import static 
org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS; +import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_IGNORED_FIELDS; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION; import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION; @@ -29,7 +30,8 @@ public Set getFeatures() { SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS, SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS, SIMULATE_MAPPING_ADDITION, - SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING + SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING, + SIMULATE_IGNORED_FIELDS ); } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index 1353fa78595ef..8233d4b334929 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -9,6 +9,8 @@ package org.elasticsearch.action.bulk; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.template.post.TransportSimulateIndexTemplateAction; @@ -33,6 +35,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexSettingProviders; @@ -40,6 +43,8 @@ import org.elasticsearch.index.IndexingPressure; import 
org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -60,6 +65,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -85,6 +91,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction { public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions"); public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition"); public static final NodeFeature SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING = new NodeFeature("simulate.support.non.template.mapping"); + public static final NodeFeature SIMULATE_IGNORED_FIELDS = new NodeFeature("simulate.ignored.fields"); private final IndicesService indicesService; private final NamedXContentRegistry xContentRegistry; private final Set indexSettingProviders; @@ -137,12 +144,13 @@ protected void doInternalExecute( DocWriteRequest docRequest = bulkRequest.requests.get(i); assert docRequest instanceof IndexRequest : "TransportSimulateBulkAction should only ever be called with IndexRequests"; IndexRequest request = (IndexRequest) docRequest; - Exception mappingValidationException = validateMappings( + Tuple, Exception> validationResult = validateMappings( componentTemplateSubstitutions, indexTemplateSubstitutions, mappingAddition, request ); + Exception mappingValidationException = validationResult.v2(); responses.set( i, BulkItemResponse.success( @@ -155,6 +163,7 @@ protected void doInternalExecute( request.source(), request.getContentType(), request.getExecutedPipelines(), + 
validationResult.v1(), mappingValidationException ) ) @@ -168,11 +177,12 @@ protected void doInternalExecute( /** * This creates a temporary index with the mappings of the index in the request, and then attempts to index the source from the request * into it. If there is a mapping exception, that exception is returned. On success the returned exception is null. - * @parem componentTemplateSubstitutions The component template definitions to use in place of existing ones for validation + * @param componentTemplateSubstitutions The component template definitions to use in place of existing ones for validation * @param request The IndexRequest whose source will be validated against the mapping (if it exists) of its index - * @return a mapping exception if the source does not match the mappings, otherwise null + * @return a Tuple containing: (1) in v1 the names of any fields that would be ignored upon indexing and (2) in v2 the mapping + * exception if the source does not match the mappings, otherwise null */ - private Exception validateMappings( + private Tuple, Exception> validateMappings( Map componentTemplateSubstitutions, Map indexTemplateSubstitutions, Map mappingAddition, @@ -189,6 +199,7 @@ private Exception validateMappings( ClusterState state = clusterService.state(); Exception mappingValidationException = null; + Collection ignoredFields = List.of(); IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(request.index()); try { if (indexAbstraction != null @@ -275,7 +286,7 @@ private Exception validateMappings( ); CompressedXContent mappings = template.mappings(); CompressedXContent mergedMappings = mergeMappings(mappings, mappingAddition); - validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse); + ignoredFields = validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse); } else { List matchingTemplates = findV1Templates(simulatedState.metadata(), request.index(), false); if (matchingTemplates.isEmpty() 
== false) { @@ -289,7 +300,7 @@ private Exception validateMappings( xContentRegistry ); final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition); - validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse); } else if (indexAbstraction != null && mappingAddition.isEmpty() == false) { /* * The index matched no templates of any kind, including the substitutions. But it might have a mapping. So we @@ -298,7 +309,7 @@ private Exception validateMappings( MappingMetadata mappingFromIndex = clusterService.state().metadata().index(indexAbstraction.getName()).mapping(); CompressedXContent currentIndexCompressedXContent = mappingFromIndex == null ? null : mappingFromIndex.source(); CompressedXContent combinedMappings = mergeMappings(currentIndexCompressedXContent, mappingAddition); - validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse); } else { /* * The index matched no templates and had no mapping of its own. If there were component template substitutions @@ -306,27 +317,28 @@ private Exception validateMappings( * and validate. */ final CompressedXContent combinedMappings = mergeMappings(null, mappingAddition); - validateUpdatedMappings(null, combinedMappings, request, sourceToParse); + ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse); } } } } catch (Exception e) { mappingValidationException = e; } - return mappingValidationException; + return Tuple.tuple(ignoredFields, mappingValidationException); } /* - * Validates that when updatedMappings are applied + * Validates that the source of the request can be indexed when updatedMappings are applied. If any fields would be ignored while indexing, then those field names are returned. + * Otherwise the returned Collection is empty.
*/ - private void validateUpdatedMappings( + private Collection validateUpdatedMappings( @Nullable CompressedXContent originalMappings, @Nullable CompressedXContent updatedMappings, IndexRequest request, SourceToParse sourceToParse ) throws IOException { if (updatedMappings == null) { - return; // no validation to do + return List.of(); // no validation to do } Settings dummySettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) @@ -343,7 +355,7 @@ private void validateUpdatedMappings( .settings(dummySettings) .putMapping(new MappingMetadata(updatedMappings)) .build(); - indicesService.withTempIndexService(originalIndexMetadata, indexService -> { + Engine.Index result = indicesService.withTempIndexService(originalIndexMetadata, indexService -> { indexService.mapperService().merge(updatedIndexMetadata, MapperService.MergeReason.MAPPING_UPDATE); return IndexShard.prepareIndex( indexService.mapperService(), @@ -360,6 +372,24 @@ private void validateUpdatedMappings( 0 ); }); + final Collection ignoredFields; + if (result == null) { + ignoredFields = List.of(); + } else { + List luceneDocuments = result.parsedDoc().docs(); + assert luceneDocuments == null || luceneDocuments.size() == 1 : "Expected a single lucene document from index attempt"; + if (luceneDocuments != null && luceneDocuments.size() == 1) { + ignoredFields = luceneDocuments.getFirst() + .getFields() + .stream() + .filter(field -> field.name().equals(IgnoredFieldMapper.NAME) && field instanceof StringField) + .map(IndexableField::stringValue) + .toList(); + } else { + ignoredFields = List.of(); + } + } + return ignoredFields; } private static CompressedXContent mergeMappings(@Nullable CompressedXContent originalMapping, Map mappingAddition) diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java index 9d883cb075ede..307996a4c72cb 100644 --- 
a/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateIndexResponse.java @@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Collection; import java.util.List; /** @@ -34,6 +35,7 @@ public class SimulateIndexResponse extends IndexResponse { private final BytesReference source; private final XContentType sourceXContentType; + private final Collection ignoredFields; private final Exception exception; @SuppressWarnings("this-escape") @@ -47,6 +49,11 @@ public SimulateIndexResponse(StreamInput in) throws IOException { } else { this.exception = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_IGNORED_FIELDS)) { + this.ignoredFields = in.readStringCollectionAsList(); + } else { + this.ignoredFields = List.of(); + } } @SuppressWarnings("this-escape") @@ -57,6 +64,7 @@ public SimulateIndexResponse( BytesReference source, XContentType sourceXContentType, List pipelines, + Collection ignoredFields, @Nullable Exception exception ) { // We don't actually care about most of the IndexResponse fields: @@ -73,6 +81,7 @@ public SimulateIndexResponse( this.source = source; this.sourceXContentType = sourceXContentType; setShardInfo(ShardInfo.EMPTY); + this.ignoredFields = ignoredFields; this.exception = exception; } @@ -84,6 +93,16 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.field("_source", XContentHelper.convertToMap(source, false, sourceXContentType).v2()); assert executedPipelines != null : "executedPipelines is null when it shouldn't be - we always list pipelines in simulate mode"; builder.array("executed_pipelines", executedPipelines.toArray()); + if (ignoredFields.isEmpty() == false) { + builder.startArray("ignored_fields"); + for (String ignoredField : ignoredFields) { + builder.startObject(); + builder.field("field", ignoredField); + 
builder.endObject(); + } + + builder.endArray(); + } if (exception != null) { builder.startObject("error"); ElasticsearchException.generateThrowableXContent(builder, params, exception); @@ -105,6 +124,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeException(exception); } + if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_IGNORED_FIELDS)) { + out.writeStringCollection(ignoredFields); + } } public Exception getException() { diff --git a/server/src/main/java/org/elasticsearch/action/support/local/LocalClusterStateRequest.java b/server/src/main/java/org/elasticsearch/action/support/local/LocalClusterStateRequest.java index dfbcb21c2a959..ec7c72c0ab428 100644 --- a/server/src/main/java/org/elasticsearch/action/support/local/LocalClusterStateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/local/LocalClusterStateRequest.java @@ -9,10 +9,13 @@ package org.elasticsearch.action.support.local; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import java.io.IOException; import java.util.Objects; @@ -32,6 +35,20 @@ protected LocalClusterStateRequest(TimeValue masterTimeout) { this.masterTimeout = Objects.requireNonNull(masterTimeout); } + /** + * This constructor exists solely for BwC purposes. It should exclusively be used by requests that used to extend + * {@link org.elasticsearch.action.support.master.MasterNodeReadRequest} and still need to be able to serialize incoming requests.
+ */ + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) + protected LocalClusterStateRequest(StreamInput in) throws IOException { + super(in); + masterTimeout = in.readTimeValue(); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { + in.readVLong(); + } + in.readBoolean(); + } + @Override public final void writeTo(StreamOutput out) throws IOException { TransportAction.localOnly(); diff --git a/server/src/main/java/org/elasticsearch/inference/UnifiedCompletionRequest.java b/server/src/main/java/org/elasticsearch/inference/UnifiedCompletionRequest.java index e596be626b518..32ed68953041a 100644 --- a/server/src/main/java/org/elasticsearch/inference/UnifiedCompletionRequest.java +++ b/server/src/main/java/org/elasticsearch/inference/UnifiedCompletionRequest.java @@ -122,7 +122,12 @@ public record Message(Content content, String role, @Nullable String name, @Null ); static { - PARSER.declareField(constructorArg(), (p, c) -> parseContent(p), new ParseField("content"), ObjectParser.ValueType.VALUE_ARRAY); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> parseContent(p), + new ParseField("content"), + ObjectParser.ValueType.VALUE_ARRAY + ); PARSER.declareString(constructorArg(), new ParseField("role")); PARSER.declareString(optionalConstructorArg(), new ParseField("name")); PARSER.declareString(optionalConstructorArg(), new ParseField("tool_call_id")); @@ -143,7 +148,7 @@ private static Content parseContent(XContentParser parser) throws IOException { public Message(StreamInput in) throws IOException { this( - in.readNamedWriteable(Content.class), + in.readOptionalNamedWriteable(Content.class), in.readString(), in.readOptionalString(), in.readOptionalString(), @@ -153,7 +158,7 @@ public Message(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeNamedWriteable(content); + out.writeOptionalNamedWriteable(content); out.writeString(role); 
out.writeOptionalString(name); out.writeOptionalString(toolCallId); diff --git a/server/src/main/java/org/elasticsearch/rest/RestUtils.java b/server/src/main/java/org/elasticsearch/rest/RestUtils.java index bbca086e345f7..10e72035cf1f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestUtils.java +++ b/server/src/main/java/org/elasticsearch/rest/RestUtils.java @@ -9,11 +9,16 @@ package org.elasticsearch.rest; +import org.elasticsearch.action.support.local.TransportLocalClusterStateAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import java.net.URI; import java.nio.charset.Charset; @@ -323,4 +328,23 @@ public static TimeValue getTimeout(RestRequest restRequest) { assert restRequest != null; return restRequest.paramAsTime(REST_TIMEOUT_PARAM, null); } + + // Remove the BWC support for the deprecated ?local parameter. + // NOTE: ensure each usage of this method has been deprecated for long enough to remove it. + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) + public static void consumeDeprecatedLocalParameter(RestRequest request) { + if (request.hasParam("local") == false) { + return; + } + // Consume this param just for validation when in BWC mode. 
+ final var local = request.paramAsBoolean("local", false); + if (request.getRestApiVersion() != RestApiVersion.V_8) { + DeprecationLogger.getLogger(TransportLocalClusterStateAction.class) + .critical( + DeprecationCategory.API, + "TransportLocalClusterStateAction-local-parameter", + "the [?local] query parameter to this API has no effect, is now deprecated, and will be removed in a future version" + ); + } + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java index 466e45ff5dfc1..390870be9de04 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetComponentTemplateAction.java @@ -14,8 +14,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; @@ -26,11 +28,12 @@ import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @ServerlessScope(Scope.PUBLIC) public class RestGetComponentTemplateAction extends BaseRestHandler { + private static final Set SUPPORTED_CAPABILITIES = Set.of("local_param_deprecated"); + @Override public List routes() { return List.of( @@ -47,18 +50,23 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException 
{ - - final GetComponentTemplateAction.Request getRequest = new GetComponentTemplateAction.Request(request.param("name")); + final GetComponentTemplateAction.Request getRequest = new GetComponentTemplateAction.Request( + RestUtils.getMasterNodeTimeout(request), + request.param("name") + ); getRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); - getRequest.local(request.paramAsBoolean("local", getRequest.local())); - getRequest.masterNodeTimeout(getMasterNodeTimeout(request)); + RestUtils.consumeDeprecatedLocalParameter(request); final boolean implicitAll = getRequest.name() == null; - return channel -> client.execute(GetComponentTemplateAction.INSTANCE, getRequest, new RestToXContentListener<>(channel, r -> { - final boolean templateExists = r.getComponentTemplates().isEmpty() == false; - return (templateExists || implicitAll) ? OK : NOT_FOUND; - })); + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute( + GetComponentTemplateAction.INSTANCE, + getRequest, + new RestToXContentListener<>(channel, r -> { + final boolean templateExists = r.getComponentTemplates().isEmpty() == false; + return (templateExists || implicitAll) ? 
OK : NOT_FOUND; + }) + ); } @Override @@ -66,4 +74,8 @@ protected Set responseParams() { return Settings.FORMAT_PARAMS; } + @Override + public Set supportedCapabilities() { + return SUPPORTED_CAPABILITIES; + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java index 994f71c82e6e1..6d72ff9558105 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java @@ -19,16 +19,12 @@ import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.util.HashMap; import java.util.Map; import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomAliases; @@ -37,30 +33,7 @@ import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.containsString; -public class GetComponentTemplateResponseTests extends AbstractWireSerializingTestCase { - @Override - protected Writeable.Reader instanceReader() { - return GetComponentTemplateAction.Response::new; - } - - @Override - protected GetComponentTemplateAction.Response createTestInstance() { - return new GetComponentTemplateAction.Response( - randomBoolean() ? 
Map.of() : randomTemplates(), - RolloverConfigurationTests.randomRolloverConditions() - ); - } - - @Override - protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplateAction.Response instance) { - var templates = instance.getComponentTemplates(); - var rolloverConditions = instance.getRolloverConfiguration(); - switch (randomInt(1)) { - case 0 -> templates = templates == null ? randomTemplates() : null; - case 1 -> rolloverConditions = randomValueOtherThan(rolloverConditions, RolloverConfigurationTests::randomRolloverConditions); - } - return new GetComponentTemplateAction.Response(templates, rolloverConditions); - } +public class GetComponentTemplateResponseTests extends ESTestCase { public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { Settings settings = null; @@ -102,17 +75,4 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws } } } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(IndicesModule.getNamedWriteables()); - } - - private static Map randomTemplates() { - Map templates = new HashMap<>(); - for (int i = 0; i < randomIntBetween(1, 4); i++) { - templates.put(randomAlphaOfLength(4), ComponentTemplateTests.randomInstance()); - } - return templates; - } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateIndexResponseTests.java index 4b226bc41b09a..f3ec4fc1ac1c3 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateIndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateIndexResponseTests.java @@ -48,6 +48,7 @@ public void testToXContent() throws IOException { sourceBytes, XContentType.JSON, pipelines, + List.of(), null ); @@ -79,6 +80,7 @@ public void testToXContent() throws IOException { sourceBytes, XContentType.JSON, pipelines, + 
List.of(), new ElasticsearchException("Some failure") ); @@ -103,6 +105,39 @@ public void testToXContent() throws IOException { ), Strings.toString(indexResponseWithException) ); + + SimulateIndexResponse indexResponseWithIgnoredFields = new SimulateIndexResponse( + id, + index, + version, + sourceBytes, + XContentType.JSON, + pipelines, + List.of("abc", "def"), + null + ); + + assertEquals( + XContentHelper.stripWhitespace( + Strings.format( + """ + { + "_id": "%s", + "_index": "%s", + "_version": %d, + "_source": %s, + "executed_pipelines": [%s], + "ignored_fields": [{"field": "abc"}, {"field": "def"}] + }""", + id, + index, + version, + source, + pipelines.stream().map(pipeline -> "\"" + pipeline + "\"").collect(Collectors.joining(",")) + ) + ), + Strings.toString(indexResponseWithIgnoredFields) + ); } public void testSerialization() throws IOException { @@ -135,6 +170,7 @@ private static SimulateIndexResponse randomIndexResponse() { sourceBytes, xContentType, pipelines, + randomList(0, 20, () -> randomAlphaOfLength(15)), randomBoolean() ? 
null : new ElasticsearchException("failed") ); } diff --git a/server/src/test/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestActionTests.java index c29ce51ecc01b..ac6c66a13b507 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestActionTests.java @@ -175,6 +175,14 @@ public void testSimulateIngestRestToXContentListener() throws Exception { "executed_pipelines" : [ "pipeline1", "pipeline2" + ], + "ignored_fields" : [ + { + "field" : "abc" + }, + { + "field" : "def" + } ] } }, @@ -199,6 +207,14 @@ public void testSimulateIngestRestToXContentListener() throws Exception { "executed_pipelines" : [ "pipeline1", "pipeline2" + ], + "ignored_fields" : [ + { + "field" : "abc" + }, + { + "field" : "def" + } ] } } @@ -228,6 +244,7 @@ private BulkItemResponse getSuccessBulkItemResponse(String id, String source) { BytesReference.fromByteBuffers(sourceByteBuffer), XContentType.JSON, List.of("pipeline1", "pipeline2"), + List.of("abc", "def"), null ) ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index 75e0c0f459a2a..368ffd7320089 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -120,7 +120,7 @@ private void deleteTemplates(Set excludeTemplates, ActionListener ); final SubscribableListener getComponentTemplates = SubscribableListener.newForked( - l -> client().execute(GetComponentTemplateAction.INSTANCE, new GetComponentTemplateAction.Request("*"), l) + l -> client().execute(GetComponentTemplateAction.INSTANCE, new GetComponentTemplateAction.Request(TEST_REQUEST_TIMEOUT, "*"), l) ); SubscribableListener diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index fa525705a9b39..f409dc17b0ed0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -852,8 +852,7 @@ protected boolean preserveSearchableSnapshotsIndicesUponCompletion() { } private void wipeCluster() throws Exception { - logger.info("Waiting for all cluster updates up to this moment to be processed"); - assertOK(adminClient().performRequest(new Request("GET", "_cluster/health?wait_for_events=languid"))); + waitForClusterUpdates(); // Cleanup rollup before deleting indices. A rollup job might have bulks in-flight, // so we need to fully shut them down first otherwise a job might stall waiting @@ -991,6 +990,38 @@ private void wipeCluster() throws Exception { deleteAllNodeShutdownMetadata(); } + private void waitForClusterUpdates() throws Exception { + logger.info("Waiting for all cluster updates up to this moment to be processed"); + try { + assertOK(adminClient().performRequest(new Request("GET", "_cluster/health?wait_for_events=languid"))); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == HttpStatus.SC_REQUEST_TIMEOUT) { + final var pendingTasks = getPendingClusterStateTasks(); + if (pendingTasks != null) { + logger.error("Timed out waiting for cluster updates to be processed, {}", pendingTasks); + } + } + throw e; + } + } + + private static String getPendingClusterStateTasks() { + try { + Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks")); + List tasks = (List) entityAsMap(response).get("tasks"); + if (false == tasks.isEmpty()) { + StringBuilder message = new StringBuilder("there are still running tasks:"); + for (Object task : tasks) { + message.append('\n').append(task.toString()); + } + return 
message.toString(); + } + } catch (IOException e) { + fail(e, "Failed to retrieve pending tasks in the cluster during cleanup"); + } + return null; + } + /** * This method checks whether ILM policies or templates get recreated after they have been deleted. If so, we are probably deleting * them unnecessarily, potentially causing test performance problems. This could happen for example if someone adds a new standard ILM @@ -1461,18 +1492,9 @@ private void logIfThereAreRunningTasks() throws IOException { */ private static void waitForClusterStateUpdatesToFinish() throws Exception { assertBusy(() -> { - try { - Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks")); - List tasks = (List) entityAsMap(response).get("tasks"); - if (false == tasks.isEmpty()) { - StringBuilder message = new StringBuilder("there are still running tasks:"); - for (Object task : tasks) { - message.append('\n').append(task.toString()); - } - fail(message.toString()); - } - } catch (IOException e) { - fail("cannot get cluster's pending tasks: " + e.getMessage()); + final var pendingTasks = getPendingClusterStateTasks(); + if (pendingTasks != null) { + fail(pendingTasks); } }, 30, TimeUnit.SECONDS); } diff --git a/updatecli-compose.yaml b/updatecli-compose.yaml index 1893c3abca3ec..19bcac28ab1a9 100644 --- a/updatecli-compose.yaml +++ b/updatecli-compose.yaml @@ -7,7 +7,7 @@ policies: - .github/updatecli/values.d/scm.yml - .github/updatecli/values.d/ironbank.yml - name: Update Updatecli policies - policy: ghcr.io/updatecli/policies/autodiscovery/updatecli:0.6.0@sha256:6bd6999620674b2fbb1d374f7a1a5e9740d042667f0592900b44259f3e1ae98f + policy: ghcr.io/updatecli/policies/autodiscovery/updatecli:0.8.0@sha256:99e9e61b501575c2c176c39f2275998d198b590a3f6b1fe829f7315f8d457e7f values: - .github/updatecli/values.d/scm.yml - .github/updatecli/values.d/updatecli-compose.yml diff --git a/x-pack/build.gradle b/x-pack/build.gradle index e79d2fe0e9899..86d56ef569adb 
100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -36,7 +36,7 @@ subprojects { } project.pluginManager.withPlugin("elasticsearch.licensing") { - ext.projectLicenses.set(['Elastic License 2.0': ext.elasticLicenseUrl.get()]) + ext.projectLicenses.set(['Elastic License 2.0': ext.elasticLicenseUrl]) } project.pluginManager.withPlugin("elasticsearch.build") { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 58a0370efb50e..01a26a7a0b7bb 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -58,6 +58,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TimeSeriesParams; @@ -723,6 +724,9 @@ private static void addTimestampField( if (mapping.get("format") != null) { builder.field("format", mapping.get("format")); } + if (mapping.get("ignore_malformed") != null) { + builder.field("ignore_malformed", mapping.get("ignore_malformed")); + } } } catch (IOException e) { throw new ElasticsearchException("Unable to create timestamp field mapping for field [" + timestampField + "]", e); @@ -897,6 +901,12 @@ private void createDownsampleIndex( sourceIndexMetadata.getSettings().get(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey()) ); } + if (sourceIndexMetadata.getSettings().hasValue(FieldMapper.IGNORE_MALFORMED_SETTING.getKey())) { + builder.put( + FieldMapper.IGNORE_MALFORMED_SETTING.getKey(), + 
sourceIndexMetadata.getSettings().get(FieldMapper.IGNORE_MALFORMED_SETTING.getKey()) + ); + } CreateIndexClusterStateUpdateRequest createIndexClusterStateUpdateRequest = new CreateIndexClusterStateUpdateRequest( "downsample", diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 812b48ee4cae5..33a436a1c7f1b 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesParams; @@ -201,14 +202,19 @@ public void setup() throws IOException { IndexSettings.TIME_SERIES_START_TIME.getKey(), DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(Instant.ofEpochMilli(startTime).toEpochMilli()) ) - .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z"); + .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z") + .put(FieldMapper.IGNORE_MALFORMED_SETTING.getKey(), randomBoolean()); if (randomBoolean()) { settings.put(IndexMetadata.SETTING_INDEX_HIDDEN, randomBoolean()); } XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties"); - mapping.startObject(FIELD_TIMESTAMP).field("type", "date").endObject(); + mapping.startObject(FIELD_TIMESTAMP).field("type", "date"); + if 
(settings.get(FieldMapper.IGNORE_MALFORMED_SETTING.getKey()).equals("true")) { + mapping.field("ignore_malformed", false); + } + mapping.endObject(); // Dimensions mapping.startObject(FIELD_DIMENSION_1).field("type", "keyword").field("time_series_dimension", true).endObject(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java index 602fd29433193..bd77bd7ff1e46 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -130,8 +130,20 @@ public static class GroupingState implements Releasable { private final BytesRefHash bytes; private GroupingState(BigArrays bigArrays) { - values = new LongLongHash(1, bigArrays); - bytes = new BytesRefHash(1, bigArrays); + LongLongHash _values = null; + BytesRefHash _bytes = null; + try { + _values = new LongLongHash(1, bigArrays); + _bytes = new BytesRefHash(1, bigArrays); + + values = _values; + bytes = _bytes; + + _values = null; + _bytes = null; + } finally { + Releasables.closeExpectNoException(_values, _bytes); + } } void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st index a8884c58116f3..1cef234b2238f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -244,8 +244,20 @@ $endif$ 
$if(long||double)$ values = new LongLongHash(1, bigArrays); $elseif(BytesRef)$ - values = new LongLongHash(1, bigArrays); - bytes = new BytesRefHash(1, bigArrays); + LongLongHash _values = null; + BytesRefHash _bytes = null; + try { + _values = new LongLongHash(1, bigArrays); + _bytes = new BytesRefHash(1, bigArrays); + + values = _values; + bytes = _bytes; + + _values = null; + _bytes = null; + } finally { + Releasables.closeExpectNoException(_values, _bytes); + } $elseif(int||float)$ values = new LongHash(1, bigArrays); $endif$ diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java new file mode 100644 index 0000000000000..c0a91fe22b87b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceBytesRefBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.Arrays; +import java.util.List; +import java.util.TreeSet; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesBytesRefAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceBytesRefBlockSourceOperator( + blockFactory, + IntStream.range(0, size).mapToObj(l -> new BytesRef(randomAlphaOfLengthBetween(0, 100))) + ); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of bytes"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + TreeSet set = new TreeSet<>((List) BlockUtils.toJavaObject(result, 0)); + Object[] values = input.stream() + .flatMap(AggregatorFunctionTestCase::allBytesRefs) + .collect(Collectors.toSet()) + .toArray(Object[]::new); + if (false == set.containsAll(Arrays.asList(values))) { + assertThat(set, containsInAnyOrder(values)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..fc9bc90828df3 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunctionTests.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.LongBytesRefTupleBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.Arrays; +import java.util.List; +import java.util.TreeSet; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesBytesRefGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of bytes"; + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new LongBytesRefTupleBlockSourceOperator( + blockFactory, + IntStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), new BytesRef(randomAlphaOfLengthBetween(0, 100)))) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream().flatMap(p -> 
allBytesRefs(p, group)).collect(Collectors.toSet()).toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, equalTo(values[0])); + default -> { + TreeSet set = new TreeSet<>((List) resultValue); + if (false == set.containsAll(Arrays.asList(values))) { + assertThat(set, containsInAnyOrder(values)); + } + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index be792a0ef2612..54db0453530bc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -143,6 +143,7 @@ public final void testSimpleWithCranky() { DriverContext driverContext = crankyDriverContext(); + Exception exception = null; boolean driverStarted = false; try { Operator operator = simple().get(driverContext); @@ -150,8 +151,8 @@ public final void testSimpleWithCranky() { drive(operator, input.iterator(), driverContext); // Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws } catch (CircuitBreakingException e) { - logger.info("broken", e); assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + exception = e; } if (driverStarted == false) { // if drive hasn't even started then we need to release the input pages @@ -159,7 +160,14 @@ public final void testSimpleWithCranky() { } // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. 
- assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); + long inputUsedBytes = inputFactoryContext.breaker().getUsed(); + if (inputUsedBytes != 0L) { + fail(exception, "Expected no used bytes for input, found: " + inputUsedBytes); + } + long driverUsedBytes = driverContext.breaker().getUsed(); + if (driverUsedBytes != 0L) { + fail(exception, "Expected no used bytes for driver, found: " + driverUsedBytes); + } } /** diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 9566aeb8f28dc..d9f211405e23e 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -30,6 +30,7 @@ import org.junit.ClassRule; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Map; @@ -87,9 +88,11 @@ private void indexDocument(String index, int id, double value, String org) throw @Before public void indexDocuments() throws IOException { + Settings lookupSettings = Settings.builder().put("index.mode", "lookup").build(); String mapping = """ "properties":{"value": {"type": "double"}, "org": {"type": "keyword"}} """; + createIndex("index", Settings.EMPTY, mapping); indexDocument("index", 1, 10.0, "sales"); indexDocument("index", 2, 20.0, "engineering"); @@ -110,6 +113,16 @@ public void indexDocuments() throws IOException { indexDocument("indexpartial", 2, 40.0, "sales"); refresh("indexpartial"); + createIndex("lookup-user1", lookupSettings, mapping); + indexDocument("lookup-user1", 1, 12.0, "engineering"); + indexDocument("lookup-user1", 2, 31.0, "sales"); + refresh("lookup-user1"); + + createIndex("lookup-user2", lookupSettings, mapping); + indexDocument("lookup-user2", 1, 32.0, 
"marketing"); + indexDocument("lookup-user2", 2, 40.0, "sales"); + refresh("lookup-user2"); + if (aliasExists("second-alias") == false) { Request aliasRequest = new Request("POST", "_aliases"); aliasRequest.setJsonEntity(""" @@ -126,6 +139,17 @@ public void indexDocuments() throws IOException { } } }, + { + "add": { + "alias": "lookup-first-alias", + "index": "lookup-user1", + "filter": { + "term": { + "org": "sales" + } + } + } + }, { "add": { "alias": "second-alias", @@ -229,22 +253,30 @@ public void testAliasFilter() throws Exception { public void testUnauthorizedIndices() throws IOException { ResponseException error; error = expectThrows(ResponseException.class, () -> runESQLCommand("user1", "from index-user2 | stats sum(value)")); + assertThat(error.getMessage(), containsString("Unknown index [index-user2]")); assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); error = expectThrows(ResponseException.class, () -> runESQLCommand("user2", "from index-user1 | stats sum(value)")); + assertThat(error.getMessage(), containsString("Unknown index [index-user1]")); assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); error = expectThrows(ResponseException.class, () -> runESQLCommand("alias_user2", "from index-user2 | stats sum(value)")); + assertThat(error.getMessage(), containsString("Unknown index [index-user2]")); assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); error = expectThrows(ResponseException.class, () -> runESQLCommand("metadata1_read2", "from index-user1 | stats sum(value)")); + assertThat(error.getMessage(), containsString("Unknown index [index-user1]")); assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); } public void testInsufficientPrivilege() { - Exception error = expectThrows(Exception.class, () -> runESQLCommand("metadata1_read2", "FROM index-user1 | STATS sum=sum(value)")); + ResponseException error = expectThrows( + 
ResponseException.class, + () -> runESQLCommand("metadata1_read2", "FROM index-user1 | STATS sum=sum(value)") + ); logger.info("error", error); assertThat(error.getMessage(), containsString("Unknown index [index-user1]")); + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); } public void testIndexPatternErrorMessageComparison_ESQL_SearchDSL() throws Exception { @@ -511,6 +543,63 @@ record Listen(long timestamp, String songId, double duration) { } } + public void testLookupJoinIndexAllowed() throws Exception { + Response resp = runESQLCommand( + "metadata1_read2", + "ROW x = 40.0 | EVAL value = x | LOOKUP JOIN `lookup-user2` ON value | KEEP x, org" + ); + assertOK(resp); + Map respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) + ); + assertThat(respMap.get("values"), equalTo(List.of(List.of(40.0, "sales")))); + + // Alias, should find the index and the row + resp = runESQLCommand("alias_user1", "ROW x = 31.0 | EVAL value = x | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x, org"); + assertOK(resp); + respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) + ); + assertThat(respMap.get("values"), equalTo(List.of(List.of(31.0, "sales")))); + + // Alias, for a row that's filtered out + resp = runESQLCommand("alias_user1", "ROW x = 123.0 | EVAL value = x | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x, org"); + assertOK(resp); + respMap = entityAsMap(resp); + assertThat( + respMap.get("columns"), + equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword"))) + ); + assertThat(respMap.get("values"), equalTo(List.of(Arrays.asList(123.0, null)))); + } + + public void testLookupJoinIndexForbidden() { + var resp = expectThrows( + 
ResponseException.class, + () -> runESQLCommand("metadata1_read2", "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN `lookup-user1` ON value | KEEP x") + ); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + + resp = expectThrows( + ResponseException.class, + () -> runESQLCommand("metadata1_read2", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x") + ); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + + resp = expectThrows( + ResponseException.class, + () -> runESQLCommand("alias_user1", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x") + ); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + } + private void createEnrichPolicy() throws Exception { createIndex("songs", Settings.EMPTY, """ "properties":{"song_id": {"type": "keyword"}, "title": {"type": "keyword"}, "artist": {"type": "keyword"} } diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml index 365a072edb74e..f46e7ef56f3a1 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml @@ -35,15 +35,15 @@ user2: metadata1_read2: cluster: [] indices: - - names: [ 'index-user1' ] + - names: [ 'index-user1', 'lookup-user1' ] privileges: [ 'view_index_metadata' ] - - names: [ 'index-user2' ] + - names: [ 'index-user2', 'lookup-user2' ] privileges: [ 'read' ] alias_user1: cluster: [] indices: - - names: [ 'first-alias' ] + - names: [ 'first-alias', 'lookup-first-alias' ] 
privileges: - read diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index d4b087277df52..9a09401785df0 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -21,7 +21,7 @@ import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V8; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V9; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @@ -96,7 +96,7 @@ protected boolean supportsInferenceTestService() { @Override protected boolean supportsIndexModeLookup() throws IOException { - return hasCapabilities(List.of(JOIN_LOOKUP_V8.capabilityName())); + return hasCapabilities(List.of(JOIN_LOOKUP_V9.capabilityName())); } @Override diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index d7c57e23b7147..a809216d3beb3 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import 
static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V8; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V9; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,7 +124,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V8.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V9.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { @@ -283,8 +283,8 @@ protected boolean supportsInferenceTestService() { @Override protected boolean supportsIndexModeLookup() throws IOException { - // CCS does not yet support JOIN_LOOKUP_V8 and clusters falsely report they have this capability - // return hasCapabilities(List.of(JOIN_LOOKUP_V8.capabilityName())); + // CCS does not yet support JOIN_LOOKUP_V9 and clusters falsely report they have this capability + // return hasCapabilities(List.of(JOIN_LOOKUP_V9.capabilityName())); return false; } } diff --git 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java index 355c403ce2a86..a83b6cf2e906c 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -221,7 +221,7 @@ public void testIndicesDontExist() throws IOException { assertThat(e.getMessage(), containsString("index_not_found_exception")); assertThat(e.getMessage(), containsString("no such index [foo]")); - if (EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()) { + if (EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()) { e = expectThrows( ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM test1 | LOOKUP JOIN foo ON id1")) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 22ff3192cc716..57c4fca6223e1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -497,6 +497,7 @@ null evalDateParseWithTimezone required_capability: date_parse_tz row s = "12/Jul/2022:10:24:10 +0900" | eval d = date_parse("dd/MMM/yyyy:HH:mm:ss Z", s); +warningRegex:Date format \[dd/MMM/yyyy:HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.* s:keyword | d:datetime 12/Jul/2022:10:24:10 +0900 | 2022-07-12T01:24:10.000Z @@ -505,6 +506,7 @@ s:keyword | d:datetime evalDateParseWithTimezoneCrossingDayBoundary required_capability: date_parse_tz row s = "12/Jul/2022:08:24:10 +0900" | eval d = date_parse("dd/MMM/yyyy:HH:mm:ss Z", s); +warningRegex:Date format \[dd/MMM/yyyy:HH:mm:ss Z\] contains textual field specifiers 
that could change in JDK 23.* s:keyword | d:datetime 12/Jul/2022:08:24:10 +0900 | 2022-07-11T23:24:10.000Z @@ -517,6 +519,8 @@ row s1 = "12/Jul/2022:10:24:10 +0900", s2 = "2022/12/07 09:24:10 +0800" | eval eq = d1 == d2 | keep d1, eq ; +warningRegex:Date format \[dd/MMM/yyyy:HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.* +warningRegex:Date format \[yyyy/dd/MM HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.* d1:datetime | eq:boolean 2022-07-12T01:24:10.000Z | true @@ -529,6 +533,7 @@ row s = "2022/12/07 09:24:10", format="yyyy/dd/MM HH:mm:ss" | eval with_tz = date_parse(concat(format, " Z"), concat(s, " +0900")) | keep s, no_tz, with_tz ; +warningRegex:Date format \[yyyy/dd/MM HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.* s:keyword | no_tz:datetime | with_tz:datetime 2022/12/07 09:24:10 | 2022-07-12T09:24:10.000Z | 2022-07-12T00:24:10.000Z @@ -543,6 +548,7 @@ row s = "2022/12/07 09:24:10", format="yyyy/dd/MM HH:mm:ss" | eval with_tz4 = date_parse(concat(format, " O"), concat(s, " GMT+9")) | keep s, with_tz* ; +warningRegex:Date format \[yyyy/dd/MM HH:mm:ss .\] contains textual field specifiers that could change in JDK 23.* s:keyword | with_tz1:datetime | with_tz2:datetime | with_tz3:datetime | with_tz4:datetime 2022/12/07 09:24:10 | 2022-07-12T00:24:10.000Z | 2022-07-12T00:24:10.000Z | 2022-07-12T00:24:10.000Z | 2022-07-12T00:24:10.000Z diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 39638899cf6b6..309386228b1c8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -8,7 +8,7 @@ ############################################### basicOnTheDataNode -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL 
language_code = languages @@ -25,7 +25,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -36,7 +36,7 @@ language_code:integer | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -53,7 +53,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; subsequentEvalOnTheDataNode -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL language_code = languages @@ -71,7 +71,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -89,7 +89,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; sortEvalBeforeLookup -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -106,7 +106,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; nonUniqueLeftKeyOnTheDataNode -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | WHERE emp_no <= 10030 @@ -130,7 +130,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; nonUniqueRightKeyOnTheDataNode -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL language_code = emp_no % 10 @@ -150,7 +150,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:k ; nonUniqueRightKeyOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -170,7 +170,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:k ; nonUniqueRightKeyFromRow -required_capability: 
join_lookup_v8 +required_capability: join_lookup_v9 ROW language_code = 2 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code @@ -183,8 +183,7 @@ language_code:integer | language_name:keyword | country:keyword ; repeatedIndexOnFrom -required_capability: join_lookup_v8 -required_capability: join_lookup_repeated_index_from +required_capability: join_lookup_v9 FROM languages_lookup | LOOKUP JOIN languages_lookup ON language_code @@ -202,7 +201,7 @@ dropAllLookedUpFieldsOnTheDataNode-Ignore // Depends on // https://github.com/elastic/elasticsearch/issues/118778 // https://github.com/elastic/elasticsearch/issues/118781 -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL language_code = emp_no % 10 @@ -223,7 +222,7 @@ dropAllLookedUpFieldsOnTheCoordinator-Ignore // Depends on // https://github.com/elastic/elasticsearch/issues/118778 // https://github.com/elastic/elasticsearch/issues/118781 -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -248,7 +247,7 @@ emp_no:integer ############################################### filterOnLeftSide -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL language_code = languages @@ -265,7 +264,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSide -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -281,7 +280,7 @@ FROM sample_data ; filterOnRightSideAfterStats -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -294,7 +293,7 @@ count:long | type:keyword ; filterOnJoinKey -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL language_code = languages @@ -309,7 +308,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; 
filterOnJoinKeyAndRightSide -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | WHERE emp_no < 10006 @@ -326,7 +325,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnRightSideOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -342,7 +341,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -358,7 +357,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnJoinKeyAndRightSideOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | SORT emp_no @@ -375,7 +374,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; filterOnTheDataNodeThenFilterOnTheCoordinator -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | EVAL language_code = languages @@ -396,7 +395,7 @@ emp_no:integer | language_code:integer | language_name:keyword ########################################################################### nullJoinKeyOnTheDataNode -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | WHERE emp_no < 10004 @@ -413,7 +412,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyOnTheDataNode -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | WHERE 10003 < emp_no AND emp_no < 10008 @@ -431,7 +430,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; mvJoinKeyFromRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW language_code = [4, 5, 6, 7] | LOOKUP JOIN languages_lookup_non_unique_key ON language_code @@ -444,7 +443,7 @@ language_code:integer | language_name:keyword | country:keyword ; mvJoinKeyFromRowExpanded 
-required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW language_code = [4, 5, 6, 7, 8] | MV_EXPAND language_code @@ -466,7 +465,7 @@ language_code:integer | language_name:keyword | country:keyword ########################################################################### joinOnNestedField -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM employees | WHERE 10000 < emp_no AND emp_no < 10006 @@ -486,7 +485,7 @@ emp_no:integer | language.id:integer | language.name:text joinOnNestedFieldRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW language.code = "EN" | LOOKUP JOIN languages_nested_fields ON language.code @@ -499,7 +498,7 @@ language.id:integer | language.code:keyword | language.name.keyword:keyword joinOnNestedNestedFieldRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW language.name.keyword = "English" | LOOKUP JOIN languages_nested_fields ON language.name.keyword @@ -515,7 +514,7 @@ language.id:integer | language.name:text | language.name.keyword:keyword ############################################### lookupIPFromRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -526,7 +525,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromKeepRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", right = "right" | KEEP left, client_ip, right @@ -538,7 +537,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowing -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -549,7 +548,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeep -required_capability: join_lookup_v8 
+required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -562,7 +561,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeepReordered -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -575,7 +574,7 @@ right | Development | 172.21.0.5 ; lookupIPFromIndex -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -594,7 +593,7 @@ ignoreOrder:true ; lookupIPFromIndexKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -614,7 +613,7 @@ ignoreOrder:true ; lookupIPFromIndexKeepKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -636,7 +635,7 @@ timestamp:date | client_ip:keyword | event_duration:long | msg:keyword ; lookupIPFromIndexStats -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -652,7 +651,7 @@ count:long | env:keyword ; lookupIPFromIndexStatsKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -669,7 +668,7 @@ count:long | env:keyword ; statsAndLookupIPFromIndex -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -690,7 +689,7 @@ count:long | client_ip:keyword | env:keyword ############################################### lookupMessageFromRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN 
message_types_lookup ON message @@ -701,7 +700,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromKeepRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | KEEP left, message, right @@ -713,7 +712,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowing -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -724,7 +723,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowingKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -736,7 +735,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromIndex -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -754,7 +753,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -773,7 +772,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | KEEP client_ip, event_duration, @timestamp, message @@ -793,7 +792,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeepReordered -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -812,7 +811,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; lookupMessageFromIndexStats -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN 
message_types_lookup ON message @@ -827,7 +826,7 @@ count:long | type:keyword ; lookupMessageFromIndexStatsKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -843,7 +842,7 @@ count:long | type:keyword ; statsAndLookupMessageFromIndex -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | STATS count = count(message) BY message @@ -861,7 +860,7 @@ count:long | type:keyword | message:keyword ; lookupMessageFromIndexTwice -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -883,7 +882,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -906,7 +905,7 @@ ignoreOrder:true ; lookupMessageFromIndexTwiceFullyShadowing -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -930,7 +929,7 @@ ignoreOrder:true ############################################### lookupIPAndMessageFromRow -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -942,7 +941,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBefore -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | KEEP left, client_ip, message, right @@ -955,7 +954,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepBetween -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = 
"172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -968,7 +967,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowKeepAfter -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -981,7 +980,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowing -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", type = "type", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -993,7 +992,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1007,7 +1006,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1022,7 +1021,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepKeepKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1038,7 +1037,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel ; lookupIPAndMessageFromRowWithShadowingKeepReordered 
-required_capability: join_lookup_v8 +required_capability: join_lookup_v9 ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -1052,7 +1051,7 @@ right | Development | Success | 172.21.0.5 ; lookupIPAndMessageFromIndex -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1072,7 +1071,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1093,7 +1092,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexStats -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1111,7 +1110,7 @@ count:long | env:keyword | type:keyword ; lookupIPAndMessageFromIndexStatsKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1130,7 +1129,7 @@ count:long | env:keyword | type:keyword ; statsAndLookupIPAndMessageFromIndex -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1149,7 +1148,7 @@ count:long | client_ip:keyword | message:keyword | env:keyword | type:keyw ; lookupIPAndMessageFromIndexChainedEvalKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -1171,7 +1170,7 @@ ignoreOrder:true ; lookupIPAndMessageFromIndexChainedRenameKeep -required_capability: join_lookup_v8 +required_capability: join_lookup_v9 FROM sample_data | EVAL client_ip = client_ip::keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5b0cccc1ed430..0ecff0e229ef7 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1231,7 +1231,7 @@ a:keyword | upper:keyword | lower:keyword π/2 + a + B + Λ ºC | Π/2 + A + B + Λ ºC | π/2 + a + b + λ ºc ; -equalsToUpperPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +equalsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | where to_upper(first_name) == "GEORGI" | keep emp_no, first_name @@ -1241,7 +1241,7 @@ emp_no:integer | first_name:keyword 10001 | Georgi ; -equalsToUpperNestedPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +equalsToUpperNestedPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | where to_upper(to_upper(to_lower(first_name))) == "GEORGI" | keep emp_no, first_name @@ -1251,7 +1251,7 @@ emp_no:integer | first_name:keyword 10001 | Georgi ; -negatedEqualsToUpperPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +negatedEqualsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | sort emp_no | where not(to_upper(first_name) == "GEORGI") @@ -1263,7 +1263,7 @@ emp_no:integer | first_name:keyword 10002 | Bezalel ; -notEqualsToUpperPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +notEqualsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | sort emp_no | where to_upper(first_name) != "GEORGI" @@ -1275,7 +1275,7 @@ emp_no:integer | first_name:keyword 10002 | Bezalel ; -negatedNotEqualsToUpperPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +negatedNotEqualsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | sort emp_no | where not(to_upper(first_name) != 
"GEORGI") @@ -1306,7 +1306,7 @@ c:long 90 ; -equalsToUpperNullFolded +equalsToUpperNullFolded#[skip:-8.16.99, reason:function's type corrected in #114334] from employees | where to_upper(null) == "Georgi" | keep emp_no, first_name @@ -1324,7 +1324,7 @@ from employees emp_no:integer | first_name:keyword ; -notEqualsToUpperNullFolded +notEqualsToUpperNullFolded#[skip:-8.16.99, reason:function's type corrected in #114334] from employees | where to_upper(null) != "Georgi" | keep emp_no, first_name @@ -1362,7 +1362,7 @@ c:long 0 ; -equalsToLowerPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +equalsToLowerPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | where to_lower(first_name) == "georgi" | keep emp_no, first_name @@ -1372,7 +1372,7 @@ emp_no:integer | first_name:keyword 10001 | Georgi ; -notEqualsToLowerPushedDown[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] +notEqualsToLowerPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13] from employees | sort emp_no | where to_lower(first_name) != "georgi" diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index b1b11ccb09c86..22f7937ccf4ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -560,12 +560,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V8(Build.current().isSnapshot()), - - /** - * LOOKUP JOIN with the same index as the FROM - */ - JOIN_LOOKUP_REPEATED_INDEX_FROM(JOIN_LOOKUP_V8.isEnabled()), + JOIN_LOOKUP_V9(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index e52e9ae989a92..74c66c0d1b338 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -132,7 +132,6 @@ */ abstract class AbstractLookupService { private final String actionName; - private final String privilegeName; private final ClusterService clusterService; private final SearchService searchService; private final TransportService transportService; @@ -143,7 +142,6 @@ abstract class AbstractLookupService readRequest ) { this.actionName = actionName; - this.privilegeName = privilegeName; this.clusterService = clusterService; this.searchService = searchService; this.transportService = transportService; @@ -237,9 +234,21 @@ public final void lookupAsync(R request, CancellableTask parentTask, ActionListe })); } + /** + * Get the privilege required to perform the lookup. + *

+ * If null is returned, no privilege check will be performed. + *

+ */ + @Nullable + protected abstract String getRequiredPrivilege(); + private void hasPrivilege(ActionListener outListener) { final Settings settings = clusterService.getSettings(); - if (settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) == false || XPackSettings.SECURITY_ENABLED.get(settings) == false) { + String privilegeName = getRequiredPrivilege(); + if (privilegeName == null + || settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) == false + || XPackSettings.SECURITY_ENABLED.get(settings) == false) { outListener.onResponse(null); return; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 50a1ffce4841f..7057b586871eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -52,16 +52,7 @@ public EnrichLookupService( BigArrays bigArrays, BlockFactory blockFactory ) { - super( - LOOKUP_ACTION_NAME, - ClusterPrivilegeResolver.MONITOR_ENRICH.name(), - clusterService, - searchService, - transportService, - bigArrays, - blockFactory, - TransportRequest::readFrom - ); + super(LOOKUP_ACTION_NAME, clusterService, searchService, transportService, bigArrays, blockFactory, TransportRequest::readFrom); } @Override @@ -90,6 +81,11 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c }; } + @Override + protected String getRequiredPrivilege() { + return ClusterPrivilegeResolver.MONITOR_ENRICH.name(); + } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) { // For range policy types, the ENRICH index field type will be one of a list of supported range types, diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 4f429c46b9123..0bbfc6dd0ce99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -23,7 +23,6 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -50,16 +49,7 @@ public LookupFromIndexService( BigArrays bigArrays, BlockFactory blockFactory ) { - super( - LOOKUP_ACTION_NAME, - ClusterPrivilegeResolver.MONITOR_ENRICH.name(), // TODO some other privilege - clusterService, - searchService, - transportService, - bigArrays, - blockFactory, - TransportRequest::readFrom - ); + super(LOOKUP_ACTION_NAME, clusterService, searchService, transportService, bigArrays, blockFactory, TransportRequest::readFrom); } @Override @@ -83,6 +73,11 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c return termQueryList(fieldType, context, inputBlock, inputDataType); } + @Override + protected String getRequiredPrivilege() { + return null; + } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { // TODO: consider supporting implicit type conversion as done in ENRICH for some types if (fieldType.typeName().equals(inputDataType.typeName()) == false) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index dedc612071434..c40263baa6566 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -575,6 +575,15 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan if (localSourceExec.indexMode() != IndexMode.LOOKUP) { throw new IllegalArgumentException("can't plan [" + join + "]"); } + Map indicesWithModes = localSourceExec.index().indexNameWithModes(); + if (indicesWithModes.size() != 1) { + throw new IllegalArgumentException("can't plan [" + join + "], found more than 1 index"); + } + var entry = indicesWithModes.entrySet().iterator().next(); + if (entry.getValue() != IndexMode.LOOKUP) { + throw new IllegalArgumentException("can't plan [" + join + "], found index with mode [" + entry.getValue() + "]"); + } + String indexName = entry.getKey(); List matchFields = new ArrayList<>(join.leftFields().size()); for (Attribute m : join.leftFields()) { Layout.ChannelAndType t = source.layout.get(m.id()); @@ -595,7 +604,7 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan matchFields.getFirst().channel(), lookupFromIndexService, matchFields.getFirst().type(), - localSourceExec.index().name(), + indexName, join.leftFields().getFirst().name(), join.addedFields().stream().map(f -> (NamedExpression) f).toList(), join.source() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 1e0374c648579..76744957ff5fc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -263,7 +263,7 @@ public final void test() throws Throwable { ); 
assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V8.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V9.capabilityName()) ); assumeFalse( "can't use TERM function in csv tests", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 674eda8916c5a..be15bb7de8b44 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2140,7 +2140,7 @@ public void testLookupMatchTypeWrong() { } public void testLookupJoinUnknownIndex() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String errorMessage = "Unknown index [foobar]"; IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); @@ -2169,7 +2169,7 @@ public void testLookupJoinUnknownIndex() { } public void testLookupJoinUnknownField() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name"; String errorMessage = "1:45: Unknown column [last_name] in right side of join"; @@ -2192,7 +2192,7 @@ public void testLookupJoinUnknownField() { } public void testMultipleLookupJoinsGiveDifferentAttributes() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); // The field attributes that get contributed by different 
LOOKUP JOIN commands must have different name ids, // even if they have the same names. Otherwise, things like dependency analysis - like in PruneColumns - cannot work based on @@ -2222,7 +2222,7 @@ public void testMultipleLookupJoinsGiveDifferentAttributes() { } public void testLookupJoinIndexMode() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); var indexResolution = AnalyzerTestUtils.expandedDefaultIndexResolution(); var lookupResolution = AnalyzerTestUtils.defaultLookupResolution(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 549ddce03c206..2f6cf46f2e2b1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -113,7 +113,7 @@ public void testTooBigQuery() { } public void testJoinOnConstant() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertEquals( "1:55: JOIN ON clause only supports fields at the moment, found [123]", error("row languages = 1, gender = \"f\" | lookup join test on 123") @@ -129,7 +129,7 @@ public void testJoinOnConstant() { } public void testJoinOnMultipleFields() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertEquals( "1:35: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on gender, languages") 
@@ -137,7 +137,7 @@ public void testJoinOnMultipleFields() { } public void testJoinTwiceOnTheSameField() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertEquals( "1:35: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on languages, languages") @@ -145,7 +145,7 @@ public void testJoinTwiceOnTheSameField() { } public void testJoinTwiceOnTheSameField_TwoLookups() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertEquals( "1:80: JOIN ON clause only supports one field at the moment, found [2]", error("row languages = 1, gender = \"f\" | lookup join test on languages | eval x = 1 | lookup join test on gender, gender") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 43d764ab2007d..533cc59b824ce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1974,7 +1974,7 @@ public void testSortByAggregate() { } public void testLookupJoinDataTypeMismatch() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 7e65cb045b26e..672eef7076c64 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -4927,7 +4927,7 @@ public void testPlanSanityCheck() throws Exception { } public void testPlanSanityCheckWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); var plan = optimizedPlan(""" FROM test @@ -6003,7 +6003,7 @@ public void testLookupStats() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String query = """ FROM test @@ -6045,7 +6045,7 @@ public void testLookupJoinPushDownFilterOnJoinKeyWithRename() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownFilterOnLeftSideField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String query = """ FROM test @@ -6088,7 +6088,7 @@ public void testLookupJoinPushDownFilterOnLeftSideField() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19] */ public void testLookupJoinPushDownDisabledForLookupField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String query = """ FROM test @@ 
-6132,7 +6132,7 @@ public void testLookupJoinPushDownDisabledForLookupField() { * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String query = """ FROM test @@ -6183,7 +6183,7 @@ public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightFiel * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20] */ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField() { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); String query = """ FROM test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index ac56d13f870f7..80f2772945e93 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -2615,7 +2615,7 @@ public void testVerifierOnMissingReferences() { } public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); // Do not assert serialization: // This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator. 
@@ -7298,7 +7298,7 @@ public void testLookupThenTopN() { } public void testLookupJoinFieldLoading() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz"))); @@ -7375,7 +7375,7 @@ public void testLookupJoinFieldLoading() throws Exception { } public void testLookupJoinFieldLoadingTwoLookups() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); TestDataSource data = dataSetWithLookupIndices( Map.of( @@ -7429,7 +7429,7 @@ public void testLookupJoinFieldLoadingTwoLookups() throws Exception { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/119082") public void testLookupJoinFieldLoadingTwoLookupsProjectInBetween() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); TestDataSource data = dataSetWithLookupIndices( Map.of( @@ -7470,7 +7470,7 @@ public void testLookupJoinFieldLoadingTwoLookupsProjectInBetween() throws Except @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/118778") public void testLookupJoinFieldLoadingDropAllFields() throws Exception { - assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz"))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 60bdf4e7f73d3..b344bd6b63255 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -1365,7 +1365,7 @@ public void testMetrics() { } public void testLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( "FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code", Set.of("languages", "languages.*", "language_code", "language_code.*"), @@ -1374,7 +1374,7 @@ public void testLookupJoin() { } public void testLookupJoinKeep() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM employees @@ -1388,7 +1388,7 @@ public void testLookupJoinKeep() { } public void testLookupJoinKeepWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM employees @@ -1402,7 +1402,7 @@ public void testLookupJoinKeepWildcard() { } public void testMultiLookupJoin() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1415,7 +1415,7 @@ public void testMultiLookupJoin() { } public void 
testMultiLookupJoinKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1429,7 +1429,7 @@ public void testMultiLookupJoinKeepBefore() { } public void testMultiLookupJoinKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1454,7 +1454,7 @@ public void testMultiLookupJoinKeepBetween() { } public void testMultiLookupJoinKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1481,7 +1481,7 @@ public void testMultiLookupJoinKeepAfter() { } public void testMultiLookupJoinKeepAfterWildcard() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1495,7 +1495,7 @@ public void testMultiLookupJoinKeepAfterWildcard() { } public void testMultiLookupJoinSameIndex() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1509,7 +1509,7 @@ public void testMultiLookupJoinSameIndex() { } public void testMultiLookupJoinSameIndexKeepBefore() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + 
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1524,7 +1524,7 @@ public void testMultiLookupJoinSameIndexKeepBefore() { } public void testMultiLookupJoinSameIndexKeepBetween() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data @@ -1550,7 +1550,7 @@ public void testMultiLookupJoinSameIndexKeepBetween() { } public void testMultiLookupJoinSameIndexKeepAfter() { - assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()); + assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()); assertFieldNames( """ FROM sample_data diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 5e6c4d53f4c58..cdc6d9b2dff5f 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -34,6 +34,7 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -341,31 +342,44 @@ protected Map infer(String modelId, List input) throws I return inferInternal(endpoint, input, null, Map.of()); } - protected Deque streamInferOnMockService(String modelId, TaskType taskType, List input) throws 
Exception { + protected Deque streamInferOnMockService( + String modelId, + TaskType taskType, + List input, + @Nullable Consumer responseConsumerCallback + ) throws Exception { var endpoint = Strings.format("_inference/%s/%s/_stream", taskType, modelId); - return callAsync(endpoint, input); + return callAsync(endpoint, input, responseConsumerCallback); } - protected Deque unifiedCompletionInferOnMockService(String modelId, TaskType taskType, List input) - throws Exception { + protected Deque unifiedCompletionInferOnMockService( + String modelId, + TaskType taskType, + List input, + @Nullable Consumer responseConsumerCallback + ) throws Exception { var endpoint = Strings.format("_inference/%s/%s/_unified", taskType, modelId); - return callAsyncUnified(endpoint, input, "user"); + return callAsyncUnified(endpoint, input, "user", responseConsumerCallback); } - private Deque callAsync(String endpoint, List input) throws Exception { + private Deque callAsync(String endpoint, List input, @Nullable Consumer responseConsumerCallback) + throws Exception { var request = new Request("POST", endpoint); request.setJsonEntity(jsonBody(input, null)); - return execAsyncCall(request); + return execAsyncCall(request, responseConsumerCallback); } - private Deque execAsyncCall(Request request) throws Exception { + private Deque execAsyncCall(Request request, @Nullable Consumer responseConsumerCallback) throws Exception { var responseConsumer = new AsyncInferenceResponseConsumer(); request.setOptions(RequestOptions.DEFAULT.toBuilder().setHttpAsyncResponseConsumerFactory(() -> responseConsumer).build()); var latch = new CountDownLatch(1); client().performRequestAsync(request, new ResponseListener() { @Override public void onSuccess(Response response) { + if (responseConsumerCallback != null) { + responseConsumerCallback.accept(response); + } latch.countDown(); } @@ -378,11 +392,16 @@ public void onFailure(Exception exception) { return responseConsumer.events(); } - private Deque 
callAsyncUnified(String endpoint, List input, String role) throws Exception { + private Deque callAsyncUnified( + String endpoint, + List input, + String role, + @Nullable Consumer responseConsumerCallback + ) throws Exception { var request = new Request("POST", endpoint); request.setJsonEntity(createUnifiedJsonBody(input, role)); - return execAsyncCall(request); + return execAsyncCall(request, responseConsumerCallback); } private String createUnifiedJsonBody(List input, String role) throws IOException { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index fc593a6a8b0fa..49fce930cd726 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.inference; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -28,6 +29,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -37,9 +39,15 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalToIgnoringCase; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; public class InferenceCrudIT extends InferenceBaseRestTest { + private static final Consumer VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER = (r) -> assertThat( + 
r.getHeader("X-elastic-product"), + is("Elasticsearch") + ); + @SuppressWarnings("unchecked") public void testCRUD() throws IOException { for (int i = 0; i < 5; i++) { @@ -442,7 +450,7 @@ public void testUnsupportedStream() throws Exception { assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get("task_type")); try { - var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomUUID())); + var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomUUID()), null); assertThat(events.size(), equalTo(2)); events.forEach(event -> { switch (event.name()) { @@ -469,7 +477,7 @@ public void testSupportedStream() throws Exception { var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphanumericOfLength(5)).toList(); try { - var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input); + var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input, VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER); var expectedResponses = Stream.concat( input.stream().map(s -> s.toUpperCase(Locale.ROOT)).map(str -> "{\"completion\":[{\"delta\":\"" + str + "\"}]}"), @@ -496,7 +504,7 @@ public void testUnifiedCompletionInference() throws Exception { var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphanumericOfLength(5)).toList(); try { - var events = unifiedCompletionInferOnMockService(modelId, TaskType.COMPLETION, input); + var events = unifiedCompletionInferOnMockService(modelId, TaskType.COMPLETION, input, VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER); var expectedResponses = expectedResultsIterator(input); assertThat(events.size(), equalTo((input.size() + 1) * 2)); events.forEach(event -> { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index ab3f466f3c11f..b993cf36cb875 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -17,6 +17,7 @@ import org.apache.http.nio.util.SimpleInputBuffer; import org.apache.http.protocol.HttpContext; import org.apache.http.util.EntityUtils; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -43,6 +44,7 @@ import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.external.response.streaming.ServerSentEvent; @@ -52,6 +54,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Collection; +import java.util.Collections; import java.util.Deque; import java.util.Iterator; import java.util.List; @@ -96,6 +99,14 @@ protected Collection> nodePlugins() { } public static class StreamingPlugin extends Plugin implements ActionPlugin { + private final SetOnce threadPool = new SetOnce<>(); + + @Override + public Collection createComponents(PluginServices services) { + threadPool.set(services.threadPool()); + return Collections.emptyList(); + } + @Override public Collection getRestHandlers( Settings settings, @@ -122,7 +133,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c var publisher = new RandomPublisher(requestCount, withError); var 
inferenceServiceResults = new StreamingInferenceServiceResults(publisher); var inferenceResponse = new InferenceAction.Response(inferenceServiceResults, inferenceServiceResults.publisher()); - new ServerSentEventsRestActionListener(channel).onResponse(inferenceResponse); + new ServerSentEventsRestActionListener(channel, threadPool).onResponse(inferenceResponse); } }, new RestHandler() { @Override @@ -132,7 +143,7 @@ public List routes() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { - new ServerSentEventsRestActionListener(channel).onFailure(expectedException); + new ServerSentEventsRestActionListener(channel, threadPool).onFailure(expectedException); } }, new RestHandler() { @Override @@ -143,7 +154,7 @@ public List routes() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { var inferenceResponse = new InferenceAction.Response(new SingleInferenceServiceResults()); - new ServerSentEventsRestActionListener(channel).onResponse(inferenceResponse); + new ServerSentEventsRestActionListener(channel, threadPool).onResponse(inferenceResponse); } }); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 72fa840ad19b0..ac225800ad1b7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -43,6 +43,7 @@ import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ 
-154,6 +155,9 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); + // This is mainly so that the rest handlers can access the ThreadPool in a way that avoids potential null pointers from it + // not being initialized yet + private final SetOnce threadPoolSetOnce = new SetOnce<>(); private final SetOnce elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); @@ -199,7 +203,7 @@ public List getRestHandlers( ) { var availableRestActions = List.of( new RestInferenceAction(), - new RestStreamInferenceAction(), + new RestStreamInferenceAction(threadPoolSetOnce), new RestGetInferenceModelAction(), new RestPutInferenceModelAction(), new RestUpdateInferenceModelAction(), @@ -208,7 +212,7 @@ public List getRestHandlers( new RestGetInferenceServicesAction() ); List conditionalRestActions = UnifiedCompletionFeature.UNIFIED_COMPLETION_FEATURE_FLAG.isEnabled() - ? List.of(new RestUnifiedCompletionInferenceAction()) + ? 
List.of(new RestUnifiedCompletionInferenceAction(threadPoolSetOnce)) : List.of(); return Stream.concat(availableRestActions.stream(), conditionalRestActions.stream()).toList(); @@ -219,6 +223,7 @@ public Collection createComponents(PluginServices services) { var throttlerManager = new ThrottlerManager(settings, services.threadPool(), services.clusterService()); var truncator = new Truncator(settings, services.clusterService()); serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings, truncator)); + threadPoolSetOnce.set(services.threadPool()); var httpClientManager = HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager); var httpRequestSenderFactory = new HttpRequestSender.Factory(serviceComponents.get(), httpClientManager, services.clusterService()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntity.java index 50339bf851f7d..f28c1b3fe8a55 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntity.java @@ -78,6 +78,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.endArray(); } + case null -> { + // do nothing + } } builder.field(ROLE_FIELD, message.role()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java index 875c288da52bd..881af435b29b6 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceAction.java @@ -7,13 +7,16 @@ package org.elasticsearch.xpack.inference.rest; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import java.util.List; +import java.util.Objects; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.inference.rest.Paths.STREAM_INFERENCE_ID_PATH; @@ -21,6 +24,13 @@ @ServerlessScope(Scope.PUBLIC) public class RestStreamInferenceAction extends BaseInferenceAction { + private final SetOnce threadPool; + + public RestStreamInferenceAction(SetOnce threadPool) { + super(); + this.threadPool = Objects.requireNonNull(threadPool); + } + @Override public String getName() { return "stream_inference_action"; @@ -38,6 +48,6 @@ protected InferenceAction.Request prepareInferenceRequest(InferenceAction.Reques @Override protected ActionListener listener(RestChannel channel) { - return new ServerSentEventsRestActionListener(channel); + return new ServerSentEventsRestActionListener(channel, threadPool); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java index 5c71b560a6b9d..51f1bc48c8306 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceAction.java @@ -7,15 +7,18 @@ package org.elasticsearch.xpack.inference.rest; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import java.io.IOException; import java.util.List; +import java.util.Objects; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.inference.rest.Paths.UNIFIED_INFERENCE_ID_PATH; @@ -23,6 +26,13 @@ @ServerlessScope(Scope.PUBLIC) public class RestUnifiedCompletionInferenceAction extends BaseRestHandler { + private final SetOnce threadPool; + + public RestUnifiedCompletionInferenceAction(SetOnce threadPool) { + super(); + this.threadPool = Objects.requireNonNull(threadPool); + } + @Override public String getName() { return "unified_inference_action"; @@ -44,6 +54,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient request = UnifiedCompletionAction.Request.parseRequest(params.inferenceEntityId(), params.taskType(), inferTimeout, parser); } - return channel -> client.execute(UnifiedCompletionAction.INSTANCE, request, new ServerSentEventsRestActionListener(channel)); + return channel -> client.execute( + UnifiedCompletionAction.INSTANCE, + request, + new ServerSentEventsRestActionListener(channel, threadPool) + ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index bf94f072b6e04..042c8b8a8346d 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -10,9 +10,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.BytesStream; @@ -29,6 +31,7 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.inference.action.InferenceAction; @@ -38,6 +41,7 @@ import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.Map; +import java.util.Objects; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicBoolean; @@ -55,6 +59,7 @@ public class ServerSentEventsRestActionListener implements ActionListener threadPool; /** * A listener for the first part of the next entry to become available for transmission. 
@@ -66,13 +71,14 @@ public class ServerSentEventsRestActionListener implements ActionListener nextBodyPartListener; - public ServerSentEventsRestActionListener(RestChannel channel) { - this(channel, channel.request()); + public ServerSentEventsRestActionListener(RestChannel channel, SetOnce threadPool) { + this(channel, channel.request(), threadPool); } - public ServerSentEventsRestActionListener(RestChannel channel, ToXContent.Params params) { + public ServerSentEventsRestActionListener(RestChannel channel, ToXContent.Params params, SetOnce threadPool) { this.channel = channel; this.params = params; + this.threadPool = Objects.requireNonNull(threadPool); } @Override @@ -99,7 +105,7 @@ protected void ensureOpen() { } private void initializeStream(InferenceAction.Response response) { - nextBodyPartListener = ActionListener.wrap(bodyPart -> { + ActionListener chunkedResponseBodyActionListener = ActionListener.wrap(bodyPart -> { // this is the first response, so we need to send the RestResponse to open the stream // all subsequent bytes will be delivered through the nextBodyPartListener channel.sendResponse(RestResponse.chunked(RestStatus.OK, bodyPart, this::release)); @@ -115,6 +121,12 @@ private void initializeStream(InferenceAction.Response response) { ) ); }); + + nextBodyPartListener = ContextPreservingActionListener.wrapPreservingContext( + chunkedResponseBodyActionListener, + threadPool.get().getThreadContext() + ); + // subscribe will call onSubscribe, which requests the first chunk response.publisher().subscribe(subscriber); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java index f945c154ea234..2037c77a3cf2a 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java @@ -702,122 +702,62 @@ public void testSerializationWithBooleanFields() throws IOException { assertJsonEquals(expectedJsonFalse, jsonStringFalse); } - // 9. Serialization with Missing Required Fields - // Test with missing required fields to ensure appropriate exceptions are thrown. - public void testSerializationWithMissingRequiredFields() { - // Create a message with missing content (required field) + // 9. a test without the content field to show that the content field is optional + public void testSerializationWithoutContentField() throws IOException { UnifiedCompletionRequest.Message message = new UnifiedCompletionRequest.Message( - null, // missing content - OpenAiUnifiedChatCompletionRequestEntity.USER_FIELD, - null, null, - null - ); - var messageList = new ArrayList(); - messageList.add(message); - // Create the unified request - UnifiedCompletionRequest unifiedRequest = new UnifiedCompletionRequest( - messageList, - null, // model - null, // maxCompletionTokens - null, // stop - null, // temperature - null, // toolChoice - null, // tools - null // topP - ); - - // Create the unified chat input - UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); - - // Create the entity - OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); - - // Attempt to serialize to XContent and expect an exception - try { - XContentBuilder builder = JsonXContent.contentBuilder(); - entity.toXContent(builder, ToXContent.EMPTY_PARAMS); - fail("Expected an exception 
due to missing required fields"); - } catch (NullPointerException | IOException e) { - // Expected exception - } - } - - // 10. Serialization with Mixed Valid and Invalid Data - // Test with a mix of valid and invalid data to ensure the serializer handles it gracefully. - public void testSerializationWithMixedValidAndInvalidData() throws IOException { - // Create a valid message - UnifiedCompletionRequest.Message validMessage = new UnifiedCompletionRequest.Message( - new UnifiedCompletionRequest.ContentString("Valid content"), - OpenAiUnifiedChatCompletionRequestEntity.USER_FIELD, - "validName", - "validToolCallId", - Collections.singletonList( - new UnifiedCompletionRequest.ToolCall( - "validId", - new UnifiedCompletionRequest.ToolCall.FunctionField("validArguments", "validFunctionName"), - "validType" - ) - ) - ); - - // Create an invalid message with null content - UnifiedCompletionRequest.Message invalidMessage = new UnifiedCompletionRequest.Message( - null, // invalid content - OpenAiUnifiedChatCompletionRequestEntity.USER_FIELD, - "invalidName", - "invalidToolCallId", + "assistant", + "name\nwith\nnewlines", + "tool_call_id\twith\ttabs", Collections.singletonList( new UnifiedCompletionRequest.ToolCall( - "invalidId", - new UnifiedCompletionRequest.ToolCall.FunctionField("invalidArguments", "invalidFunctionName"), - "invalidType" + "id\\with\\backslashes", + new UnifiedCompletionRequest.ToolCall.FunctionField("arguments\"with\"quotes", "function_name/with/slashes"), + "type" ) ) ); var messageList = new ArrayList(); - messageList.add(validMessage); - messageList.add(invalidMessage); - // Create the unified request with both valid and invalid messages - UnifiedCompletionRequest unifiedRequest = new UnifiedCompletionRequest( - messageList, - "model-name", - 100L, // maxCompletionTokens - Collections.singletonList("stop"), - 0.9f, // temperature - new UnifiedCompletionRequest.ToolChoiceString("tool_choice"), - Collections.singletonList( - new 
UnifiedCompletionRequest.Tool( - "type", - new UnifiedCompletionRequest.Tool.FunctionField( - "Fetches the weather in the given location", - "get_weather", - createParameters(), - true - ) - ) - ), - 0.8f // topP - ); + messageList.add(message); + UnifiedCompletionRequest unifiedRequest = new UnifiedCompletionRequest(messageList, null, null, null, null, null, null, null); - // Create the unified chat input UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); + OpenAiChatCompletionModel model = createChatCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); - - // Create the entity OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); - // Serialize to XContent and verify - try { - XContentBuilder builder = JsonXContent.contentBuilder(); - entity.toXContent(builder, ToXContent.EMPTY_PARAMS); - fail("Expected an exception due to invalid data"); - } catch (NullPointerException | IOException e) { - // Expected exception - } + XContentBuilder builder = JsonXContent.contentBuilder(); + entity.toXContent(builder, ToXContent.EMPTY_PARAMS); + + String jsonString = Strings.toString(builder); + String expectedJson = """ + { + "messages": [ + { + "role": "assistant", + "name": "name\\nwith\\nnewlines", + "tool_call_id": "tool_call_id\\twith\\ttabs", + "tool_calls": [ + { + "id": "id\\\\with\\\\backslashes", + "function": { + "arguments": "arguments\\"with\\"quotes", + "name": "function_name/with/slashes" + }, + "type": "type" + } + ] + } + ], + "model": "test-endpoint", + "n": 1, + "stream": true, + "stream_options": { + "include_usage": true + } + } + """; + assertJsonEquals(jsonString, expectedJson); } public static Map createParameters() { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java index b999e2c9b72f0..f67680ef6b625 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestStreamInferenceActionTests.java @@ -12,8 +12,11 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.junit.After; import org.junit.Before; import static org.elasticsearch.xpack.inference.rest.BaseInferenceActionTests.createResponse; @@ -22,10 +25,18 @@ import static org.hamcrest.Matchers.instanceOf; public class RestStreamInferenceActionTests extends RestActionTestCase { + private final SetOnce threadPool = new SetOnce<>(); @Before public void setUpAction() { - controller().registerHandler(new RestStreamInferenceAction()); + threadPool.set(new TestThreadPool(getTestName())); + controller().registerHandler(new RestStreamInferenceAction(threadPool)); + } + + @After + public void tearDownAction() { + terminate(threadPool.get()); + } public void testStreamIsTrue() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java index 5acfe67b175df..9dc23c890c14d 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rest/RestUnifiedCompletionInferenceActionTests.java @@ -17,8 +17,11 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; +import org.junit.After; import org.junit.Before; import static org.elasticsearch.xpack.inference.rest.BaseInferenceActionTests.createResponse; @@ -27,10 +30,17 @@ import static org.hamcrest.Matchers.instanceOf; public class RestUnifiedCompletionInferenceActionTests extends RestActionTestCase { + private final SetOnce threadPool = new SetOnce<>(); @Before public void setUpAction() { - controller().registerHandler(new RestUnifiedCompletionInferenceAction()); + threadPool.set(new TestThreadPool(getTestName())); + controller().registerHandler(new RestUnifiedCompletionInferenceAction(threadPool)); + } + + @After + public void tearDownAction() { + terminate(threadPool.get()); } public void testStreamIsTrue() { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml index 5b39f74de1b9d..57d2dac23026b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [join_lookup_v8] + capabilities: [join_lookup_v9] reason: "uses LOOKUP JOIN" - do: indices.create: @@ -32,6 +32,13 @@ setup: type: long 
color: type: keyword + - do: + indices.update_aliases: + body: + actions: + - add: + index: test-lookup + alias: test-lookup-alias - do: bulk: index: "test" @@ -75,3 +82,45 @@ non-lookup index: - match: { error.type: "verification_exception" } - contains: { error.reason: "Found 1 problem\nline 1:43: invalid [test] resolution in lookup mode to an index in [standard] mode" } + +--- +alias: + - do: + esql.query: + body: + query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [1, "cyan"]} + - match: {values.1: [2, "yellow"]} + +--- +alias-repeated-alias: + - do: + esql.query: + body: + query: 'FROM test-lookup-alias | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [1, "cyan"]} + - match: {values.1: [2, "yellow"]} + +--- +alias-repeated-index: + - do: + esql.query: + body: + query: 'FROM test-lookup | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [1, "cyan"]} + - match: {values.1: [2, "yellow"]} diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index fe4c1c20c69c4..516dd4759861f 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -209,7 
+209,6 @@ public void testWatcher() throws Exception { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/84700") public void testWatcherWithApiKey() throws Exception { final Request getWatchStatusRequest = new Request("GET", "/_watcher/watch/watch_with_api_key");