From 8aa3185e18bc725c1aa74ee55dcdf3c0cdc512e3 Mon Sep 17 00:00:00 2001 From: Sooraj Sinha <81695996+soosinha@users.noreply.github.com> Date: Wed, 18 Dec 2024 11:42:21 +0530 Subject: [PATCH 01/27] Change Remote state read thread pool to Fixed type (#16850) * Change Remote state read thread pool to Fixed type Signed-off-by: Sooraj Sinha --- .../TransportClusterManagerNodeAction.java | 9 ++++++++- .../coordination/PublicationTransportHandler.java | 2 +- .../gateway/remote/RemoteClusterStateService.java | 14 ++++++++++++++ .../java/org/opensearch/threadpool/ThreadPool.java | 4 ++-- .../remote/RemoteClusterStateServiceTests.java | 8 ++++++++ .../threadpool/ScalingThreadPoolTests.java | 1 - 6 files changed, 33 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java b/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java index 819e09312a0df..558b7370749d5 100644 --- a/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java +++ b/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java @@ -430,6 +430,13 @@ private ClusterState getStateFromLocalNode(GetTermVersionResponse termVersionRes if (remoteClusterStateService != null && termVersionResponse.isStatePresentInRemote()) { try { + logger.info( + () -> new ParameterizedMessage( + "Term version checker downloading full cluster state for term {}, version {}", + termVersion.getTerm(), + termVersion.getVersion() + ) + ); ClusterStateTermVersion clusterStateTermVersion = termVersionResponse.getClusterStateTermVersion(); Optional clusterMetadataManifest = remoteClusterStateService .getClusterMetadataManifestByTermVersion( @@ -454,7 +461,7 @@ private ClusterState getStateFromLocalNode(GetTermVersionResponse termVersionRes return clusterStateFromRemote; } } catch (Exception e) { - 
logger.trace("Error while fetching from remote cluster state", e); + logger.error("Error while fetching from remote cluster state", e); } } return null; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java index 7275d72f2db9f..4ad5b80038048 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java @@ -258,7 +258,7 @@ PublishWithJoinResponse handleIncomingRemotePublishRequest(RemotePublishRequest } if (applyFullState == true) { - logger.debug( + logger.info( () -> new ParameterizedMessage( "Downloading full cluster state for term {}, version {}, stateUUID {}", manifest.getClusterTerm(), diff --git a/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java b/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java index e7eac0ae67714..778ab3e56cf76 100644 --- a/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java +++ b/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java @@ -1494,8 +1494,22 @@ public ClusterState getClusterStateForManifest( try { ClusterState stateFromCache = remoteClusterStateCache.getState(clusterName, manifest); if (stateFromCache != null) { + logger.trace( + () -> new ParameterizedMessage( + "Found cluster state in cache for term {} and version {}", + manifest.getClusterTerm(), + manifest.getStateVersion() + ) + ); return stateFromCache; } + logger.info( + () -> new ParameterizedMessage( + "Cluster state not found in cache for term {} and version {}", + manifest.getClusterTerm(), + manifest.getStateVersion() + ) + ); final ClusterState clusterState; final long startTimeNanos = relativeTimeNanosSupplier.getAsLong(); diff --git 
a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index 269a4c87dfb72..59d3b110aeca8 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -198,7 +198,7 @@ public static ThreadPoolType fromType(String type) { map.put(Names.REMOTE_PURGE, ThreadPoolType.SCALING); map.put(Names.REMOTE_REFRESH_RETRY, ThreadPoolType.SCALING); map.put(Names.REMOTE_RECOVERY, ThreadPoolType.SCALING); - map.put(Names.REMOTE_STATE_READ, ThreadPoolType.SCALING); + map.put(Names.REMOTE_STATE_READ, ThreadPoolType.FIXED); map.put(Names.INDEX_SEARCHER, ThreadPoolType.RESIZABLE); map.put(Names.REMOTE_STATE_CHECKSUM, ThreadPoolType.FIXED); THREAD_POOL_TYPES = Collections.unmodifiableMap(map); @@ -306,7 +306,7 @@ public ThreadPool( ); builders.put( Names.REMOTE_STATE_READ, - new ScalingExecutorBuilder(Names.REMOTE_STATE_READ, 1, boundedBy(4 * allocatedProcessors, 4, 32), TimeValue.timeValueMinutes(5)) + new FixedExecutorBuilder(settings, Names.REMOTE_STATE_READ, boundedBy(4 * allocatedProcessors, 4, 32), 120000) ); builders.put( Names.INDEX_SEARCHER, diff --git a/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java b/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java index be07aa0d05e9f..e3684178a18ea 100644 --- a/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java +++ b/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java @@ -2354,6 +2354,14 @@ public void testReadLatestClusterStateFromCache() throws IOException { .getState(clusterState.getClusterName().value(), expectedManifest); assertEquals(stateFromCache.getMetadata(), state.getMetadata()); + ClusterState stateFromCache2 = remoteClusterStateService.getClusterStateForManifest( + clusterState.getClusterName().value(), + expectedManifest, + 
"nodeA", + true + ); + assertEquals(stateFromCache2.getMetadata(), state.getMetadata()); + final ClusterMetadataManifest notExistMetadata = ClusterMetadataManifest.builder() .indices(List.of()) .clusterTerm(1L) diff --git a/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java b/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java index b4726bab50198..23c21648b1263 100644 --- a/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java +++ b/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java @@ -156,7 +156,6 @@ private int expectedSize(final String threadPoolName, final int numberOfProcesso sizes.put(ThreadPool.Names.REMOTE_PURGE, ThreadPool::halfAllocatedProcessors); sizes.put(ThreadPool.Names.REMOTE_REFRESH_RETRY, ThreadPool::halfAllocatedProcessors); sizes.put(ThreadPool.Names.REMOTE_RECOVERY, ThreadPool::twiceAllocatedProcessors); - sizes.put(ThreadPool.Names.REMOTE_STATE_READ, n -> ThreadPool.boundedBy(4 * n, 4, 32)); return sizes.get(threadPoolName).apply(numberOfProcessors); } From b5f651f648d955b2e943fe3a17a95e8fb33a3980 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 18 Dec 2024 09:20:59 -0500 Subject: [PATCH 02/27] [Backport] [2.18] Update Apache Lucene to 9.12.1 (#16846) (#16870) (#16877) * Update Apache Lucene to 9.12.1 (#16846) Signed-off-by: Andriy Redko (cherry picked from commit bc4f44bd4d2a0bec6d6ded797e63ba99df6197a6) * Added bwc version 1.3.21 (#16845) Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Signed-off-by: Andriy Redko Co-authored-by: opensearch-ci-bot <83309141+opensearch-ci-bot@users.noreply.github.com> --------- Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Signed-off-by: Andriy Redko Co-authored-by: opensearch-ci-bot <83309141+opensearch-ci-bot@users.noreply.github.com> (cherry picked from commit 97f6d84d306521947d621e93085f340e5e666c65) --- 
libs/core/src/main/java/org/opensearch/Version.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index 62c9cb36727b2..dd804fcc6db70 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -112,8 +112,8 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_2_17_1 = new Version(2170199, org.apache.lucene.util.Version.LUCENE_9_11_1); public static final Version V_2_17_2 = new Version(2170299, org.apache.lucene.util.Version.LUCENE_9_11_1); public static final Version V_2_18_0 = new Version(2180099, org.apache.lucene.util.Version.LUCENE_9_12_0); - public static final Version V_2_18_1 = new Version(2180199, org.apache.lucene.util.Version.LUCENE_9_12_0); - public static final Version V_2_19_0 = new Version(2190099, org.apache.lucene.util.Version.LUCENE_9_12_0); + public static final Version V_2_18_1 = new Version(2180199, org.apache.lucene.util.Version.LUCENE_9_12_1); + public static final Version V_2_19_0 = new Version(2190099, org.apache.lucene.util.Version.LUCENE_9_12_1); public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_12_1); public static final Version CURRENT = V_3_0_0; From e6d71d290e56e93920f8b8efa5ddbf885699607e Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Thu, 19 Dec 2024 17:58:09 +0530 Subject: [PATCH 03/27] Changes to support IP field in star tree indexing (#16641) * Changes to support IP --------- Signed-off-by: bharath-techie --- .../index/mapper/StarTreeMapperIT.java | 22 +++-- .../Composite912DocValuesWriter.java | 30 +++++-- .../datacube/DimensionFactory.java | 15 ++-- .../datacube/DimensionType.java | 12 ++- .../compositeindex/datacube/IpDimension.java | 82 +++++++++++++++++++ ...rdDimension.java => OrdinalDimension.java} | 10 +-- 
.../index/mapper/IpFieldMapper.java | 7 ++ .../index/mapper/KeywordFieldMapper.java | 2 +- .../StarTreeKeywordDocValuesFormatTests.java | 37 +++++++-- .../StarTreeBuilderFlushFlowTests.java | 11 +-- .../StarTreeBuilderMergeFlowTests.java | 2 +- .../builder/StarTreeBuilderTestCase.java | 9 +- .../index/mapper/StarTreeMapperTests.java | 8 +- 13 files changed, 204 insertions(+), 43 deletions(-) create mode 100644 server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java rename server/src/main/java/org/opensearch/index/compositeindex/datacube/{KeywordDimension.java => OrdinalDimension.java} (87%) diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java index c91c4d7bbb63b..3f9053576329c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java @@ -56,7 +56,7 @@ public class StarTreeMapperIT extends OpenSearchIntegTestCase { .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) .build(); - private static XContentBuilder createMinimalTestMapping(boolean invalidDim, boolean invalidMetric, boolean ipdim) { + private static XContentBuilder createMinimalTestMapping(boolean invalidDim, boolean invalidMetric, boolean wildcard) { try { return jsonBuilder().startObject() .startObject("composite") @@ -68,7 +68,7 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool .endObject() .startArray("ordered_dimensions") .startObject() - .field("name", getDim(invalidDim, ipdim)) + .field("name", getDim(invalidDim, wildcard)) .endObject() .startObject() .field("name", "keyword_dv") @@ -102,8 +102,16 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool .field("type", "keyword") 
.field("doc_values", false) .endObject() + .startObject("ip_no_dv") + .field("type", "ip") + .field("doc_values", false) + .endObject() .startObject("ip") .field("type", "ip") + .field("doc_values", true) + .endObject() + .startObject("wildcard") + .field("type", "wildcard") .field("doc_values", false) .endObject() .endObject() @@ -362,11 +370,11 @@ private XContentBuilder getMappingWithDuplicateFields(boolean isDuplicateDim, bo return mapping; } - private static String getDim(boolean hasDocValues, boolean isKeyword) { + private static String getDim(boolean hasDocValues, boolean isWildCard) { if (hasDocValues) { - return random().nextBoolean() ? "numeric" : "keyword"; - } else if (isKeyword) { - return "ip"; + return random().nextBoolean() ? "numeric" : random().nextBoolean() ? "keyword" : "ip_no_dv"; + } else if (isWildCard) { + return "wildcard"; } return "numeric_dv"; } @@ -748,7 +756,7 @@ public void testUnsupportedDim() { () -> prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createMinimalTestMapping(false, false, true)).get() ); assertEquals( - "Failed to parse mapping [_doc]: unsupported field type associated with dimension [ip] as part of star tree field [startree-1]", + "Failed to parse mapping [_doc]: unsupported field type associated with dimension [wildcard] as part of star tree field [startree-1]", ex.getMessage() ); } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java index 904d6a7aba5c6..ca52d8bf4bca0 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java @@ -33,9 +33,11 @@ import org.opensearch.index.compositeindex.datacube.startree.builder.StarTreesBuilder; import 
org.opensearch.index.compositeindex.datacube.startree.index.CompositeIndexValues; import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; +import org.opensearch.index.fielddata.IndexNumericFieldData; +import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.opensearch.index.mapper.CompositeMappedFieldType; import org.opensearch.index.mapper.DocCountFieldMapper; -import org.opensearch.index.mapper.KeywordFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import java.io.IOException; @@ -44,6 +46,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -262,22 +265,38 @@ public SortedSetDocValues getSortedSet(FieldInfo field) { return DocValues.emptySortedSet(); } }); - } - // TODO : change this logic to evaluate for sortedNumericField specifically - else { + } else if (isSortedNumericField(compositeField)) { fieldProducerMap.put(compositeField, new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) { return DocValues.emptySortedNumeric(); } }); + } else { + throw new IllegalStateException( + String.format(Locale.ROOT, "Unsupported DocValues field associated with the composite field : %s", compositeField) + ); } } compositeFieldSet.remove(compositeField); } private boolean isSortedSetField(String field) { - return mapperService.fieldType(field) instanceof KeywordFieldMapper.KeywordFieldType; + MappedFieldType ft = mapperService.fieldType(field); + assert ft.isAggregatable(); + return ft.fielddataBuilder( + "", + () -> { throw new UnsupportedOperationException("SearchLookup not available"); } + ) instanceof SortedSetOrdinalsIndexFieldData.Builder; + } + + private boolean isSortedNumericField(String field) { + MappedFieldType ft = 
mapperService.fieldType(field); + assert ft.isAggregatable(); + return ft.fielddataBuilder( + "", + () -> { throw new UnsupportedOperationException("SearchLookup not available"); } + ) instanceof IndexNumericFieldData.Builder; } @Override @@ -370,5 +389,4 @@ private static SegmentWriteState getSegmentWriteState(SegmentWriteState segmentW segmentWriteState.segmentSuffix ); } - } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java index e834706e2fa9d..b1e78d78d3ad2 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java @@ -24,7 +24,8 @@ import java.util.stream.Collectors; import static org.opensearch.index.compositeindex.datacube.DateDimension.CALENDAR_INTERVALS; -import static org.opensearch.index.compositeindex.datacube.KeywordDimension.KEYWORD; +import static org.opensearch.index.compositeindex.datacube.IpDimension.IP; +import static org.opensearch.index.compositeindex.datacube.OrdinalDimension.ORDINAL; /** * Dimension factory class mainly used to parse and create dimension from the mappings @@ -44,8 +45,10 @@ public static Dimension parseAndCreateDimension( return parseAndCreateDateDimension(name, dimensionMap, c); case NumericDimension.NUMERIC: return new NumericDimension(name); - case KEYWORD: - return new KeywordDimension(name); + case ORDINAL: + return new OrdinalDimension(name); + case IP: + return new IpDimension(name); default: throw new IllegalArgumentException( String.format(Locale.ROOT, "unsupported field type associated with dimension [%s] as part of star tree field", name) @@ -69,8 +72,10 @@ public static Dimension parseAndCreateDimension( return parseAndCreateDateDimension(name, dimensionMap, c); case NUMERIC: return new NumericDimension(name); - case KEYWORD: - return new 
KeywordDimension(name); + case ORDINAL: + return new OrdinalDimension(name); + case IP: + return new IpDimension(name); default: throw new IllegalArgumentException( String.format(Locale.ROOT, "unsupported field type associated with star tree dimension [%s]", name) diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java index d327f8ca1fa1e..f7911e72f36fc 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java @@ -30,8 +30,14 @@ public enum DimensionType { DATE, /** - * Represents a keyword dimension type. - * This is used for dimensions that contain keyword ordinals. + * Represents dimension types which uses ordinals. + * This is used for dimensions that contain sortedSet ordinals. */ - KEYWORD + ORDINAL, + + /** + * Represents an IP dimension type. + * This is used for dimensions that contain IP ordinals. + */ + IP } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java new file mode 100644 index 0000000000000..9c3682bd2e0ea --- /dev/null +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.compositeindex.datacube; + +import org.apache.lucene.index.DocValuesType; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.mapper.CompositeDataCubeFieldType; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; + +/** + * Composite index keyword dimension class + * + * @opensearch.experimental + */ +@ExperimentalApi +public class IpDimension implements Dimension { + public static final String IP = "ip"; + private final String field; + + public IpDimension(String field) { + this.field = field; + } + + @Override + public String getField() { + return field; + } + + @Override + public int getNumSubDimensions() { + return 1; + } + + @Override + public void setDimensionValues(Long value, Consumer dimSetter) { + // This will set the keyword dimension value's ordinal + dimSetter.accept(value); + } + + @Override + public List getSubDimensionNames() { + return List.of(field); + } + + @Override + public DocValuesType getDocValuesType() { + return DocValuesType.SORTED_SET; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CompositeDataCubeFieldType.NAME, field); + builder.field(CompositeDataCubeFieldType.TYPE, IP); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IpDimension dimension = (IpDimension) o; + return Objects.equals(field, dimension.getField()); + } + + @Override + public int hashCode() { + return Objects.hash(field); + } +} diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/OrdinalDimension.java similarity index 87% 
rename from server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java rename to server/src/main/java/org/opensearch/index/compositeindex/datacube/OrdinalDimension.java index 58e248fd548d6..9cb4cd78bdaac 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/OrdinalDimension.java @@ -24,11 +24,11 @@ * @opensearch.experimental */ @ExperimentalApi -public class KeywordDimension implements Dimension { - public static final String KEYWORD = "keyword"; +public class OrdinalDimension implements Dimension { + public static final String ORDINAL = "ordinal"; private final String field; - public KeywordDimension(String field) { + public OrdinalDimension(String field) { this.field = field; } @@ -62,7 +62,7 @@ public DocValuesType getDocValuesType() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CompositeDataCubeFieldType.NAME, field); - builder.field(CompositeDataCubeFieldType.TYPE, KEYWORD); + builder.field(CompositeDataCubeFieldType.TYPE, ORDINAL); builder.endObject(); return builder; } @@ -71,7 +71,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - KeywordDimension dimension = (KeywordDimension) o; + OrdinalDimension dimension = (OrdinalDimension) o; return Objects.equals(field, dimension.getField()); } diff --git a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java index e23a48f94f450..1283aa302c111 100644 --- a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java @@ -52,6 +52,7 @@ import 
org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.network.InetAddresses; import org.opensearch.common.network.NetworkAddress; +import org.opensearch.index.compositeindex.datacube.DimensionType; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; @@ -68,6 +69,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.function.BiFunction; import java.util.function.Supplier; @@ -161,6 +163,11 @@ public IpFieldMapper build(BuilderContext context) { ); } + @Override + public Optional getSupportedDataCubeDimensionType() { + return Optional.of(DimensionType.IP); + } + } public static final TypeParser PARSER = new TypeParser((n, c) -> { diff --git a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java index df14a5811f6a0..90e43c818e137 100644 --- a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java @@ -259,7 +259,7 @@ public KeywordFieldMapper build(BuilderContext context) { @Override public Optional getSupportedDataCubeDimensionType() { - return Optional.of(DimensionType.KEYWORD); + return Optional.of(DimensionType.ORDINAL); } } diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java index 402ed1dbee98a..5603fe4e30f9f 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java +++ 
b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; @@ -25,6 +26,7 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.opensearch.common.lucene.Lucene; +import org.opensearch.common.network.InetAddresses; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; @@ -36,6 +38,8 @@ import org.opensearch.index.mapper.NumberFieldMapper; import java.io.IOException; +import java.net.InetAddress; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -65,12 +69,15 @@ public void testStarTreeKeywordDocValues() throws IOException { doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); iw.addDocument(doc); doc = new Document(); doc.add(new StringField("_id", "2", Field.Store.NO)); doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); + iw.addDocument(doc); iw.flush(); iw.deleteDocuments(new Term("_id", "2")); @@ -80,12 
+87,14 @@ public void testStarTreeKeywordDocValues() throws IOException { doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); iw.addDocument(doc); doc = new Document(); doc.add(new StringField("_id", "4", Field.Store.NO)); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); iw.addDocument(doc); iw.flush(); iw.deleteDocuments(new Term("_id", "4")); @@ -166,6 +175,9 @@ public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { doc.add(new SortedSetDocValuesField("keyword2", new BytesRef(keyword2Value))); map.put(keyword1Value + "-" + keyword2Value, sndvValue + map.getOrDefault(keyword1Value + "-" + keyword2Value, 0)); + doc.add( + new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10." 
+ i)))) + ); iw.addDocument(doc); documents.put(id, doc); } @@ -221,9 +233,7 @@ public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { SortedSetStarTreeValuesIterator k1 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator( "keyword1" ); - SortedSetStarTreeValuesIterator k2 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator( - "keyword2" - ); + SortedSetStarTreeValuesIterator k2 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator("ip1"); for (StarTreeDocument starDoc : actualStarTreeDocuments) { String keyword1 = null; if (starDoc.dimensions[0] != null) { @@ -232,7 +242,11 @@ public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { String keyword2 = null; if (starDoc.dimensions[1] != null) { - keyword2 = k2.lookupOrd(starDoc.dimensions[1]).utf8ToString(); + BytesRef encoded = k2.lookupOrd(starDoc.dimensions[1]); + InetAddress address = InetAddressPoint.decode( + Arrays.copyOfRange(encoded.bytes, encoded.offset, encoded.offset + encoded.length) + ); + keyword2 = InetAddresses.toAddrString(address); } double metric = (double) starDoc.metrics[0]; if (map.containsKey(keyword1 + "-" + keyword2)) { @@ -254,21 +268,28 @@ public void testStarKeywordDocValuesWithMissingDocs() throws IOException { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); + iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); iw.addDocument(doc); iw.forceMerge(1); doc = new Document(); doc.add(new 
SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); + iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); + iw.addDocument(doc); iw.forceMerge(1); iw.close(); @@ -340,11 +361,14 @@ public void testStarKeywordDocValuesWithMissingDocsInSegment() throws IOExceptio doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); + iw.addDocument(doc); iw.forceMerge(1); iw.close(); @@ -538,7 +562,7 @@ protected XContentBuilder getMapping() throws IOException { b.field("name", "keyword1"); b.endObject(); b.startObject(); - b.field("name", "keyword2"); + b.field("name", "ip1"); b.endObject(); b.endArray(); b.startArray("metrics"); @@ -566,6 +590,9 @@ protected XContentBuilder getMapping() throws IOException { b.startObject("keyword2"); b.field("type", "keyword"); b.endObject(); + 
b.startObject("ip1"); + b.field("type", "ip"); + b.endObject(); b.endObject(); }); } diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java index 440268f1f803c..70cc20fe4a9f6 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java @@ -20,10 +20,11 @@ import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory; import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; import org.opensearch.index.compositeindex.datacube.Dimension; -import org.opensearch.index.compositeindex.datacube.KeywordDimension; +import org.opensearch.index.compositeindex.datacube.IpDimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; +import org.opensearch.index.compositeindex.datacube.OrdinalDimension; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; @@ -426,7 +427,7 @@ public void testFlushFlowForKeywords() throws IOException { ); List metricsWithField = List.of(0, 1, 2, 3, 4, 5); - compositeField = getStarTreeFieldWithKeywordField(); + compositeField = getStarTreeFieldWithKeywordField(random().nextBoolean()); SortedSetStarTreeValuesIterator d1sndv = new SortedSetStarTreeValuesIterator(getSortedSetMock(dimList, docsWithField)); SortedSetStarTreeValuesIterator d2sndv = new 
SortedSetStarTreeValuesIterator(getSortedSetMock(dimList2, docsWithField2)); SortedNumericStarTreeValuesIterator m1sndv = new SortedNumericStarTreeValuesIterator( @@ -531,9 +532,9 @@ private StarTreeField getStarTreeFieldWithMultipleMetrics() { return new StarTreeField("sf", dims, metrics, c); } - private StarTreeField getStarTreeFieldWithKeywordField() { - Dimension d1 = new KeywordDimension("field1"); - Dimension d2 = new KeywordDimension("field3"); + private StarTreeField getStarTreeFieldWithKeywordField(boolean isIp) { + Dimension d1 = isIp ? new IpDimension("field1") : new OrdinalDimension("field1"); + Dimension d2 = isIp ? new IpDimension("field3") : new OrdinalDimension("field3"); Metric m1 = new Metric("field2", List.of(MetricStat.SUM)); Metric m2 = new Metric("field2", List.of(MetricStat.VALUE_COUNT)); Metric m3 = new Metric("field2", List.of(MetricStat.AVG)); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java index be16961e781db..74ecff04076b1 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java @@ -1831,7 +1831,7 @@ public void testMergeFlowWithKeywords() throws IOException { List metricsList2 = List.of(0L, 1L, 2L, 3L, 4L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - compositeField = getStarTreeFieldWithKeywords(); + compositeField = getStarTreeFieldWithKeywords(random().nextBoolean()); StarTreeValues starTreeValues = getStarTreeValuesWithKeywords( getSortedSetMock(dimList, docsWithField), getSortedSetMock(dimList2, docsWithField2), diff --git 
a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java index 9c9beaea4f52c..cca987b6f9b16 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java @@ -32,10 +32,11 @@ import org.opensearch.index.compositeindex.datacube.DataCubeDateTimeUnit; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.Dimension; -import org.opensearch.index.compositeindex.datacube.KeywordDimension; +import org.opensearch.index.compositeindex.datacube.IpDimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; +import org.opensearch.index.compositeindex.datacube.OrdinalDimension; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; @@ -352,9 +353,9 @@ protected StarTreeMetadata getStarTreeMetadata( ); } - protected StarTreeField getStarTreeFieldWithKeywords() { - Dimension d1 = new KeywordDimension("field1"); - Dimension d2 = new KeywordDimension("field3"); + protected StarTreeField getStarTreeFieldWithKeywords(boolean ip) { + Dimension d1 = ip ? new IpDimension("field1") : new OrdinalDimension("field1"); + Dimension d2 = ip ? 
new IpDimension("field3") : new OrdinalDimension("field3"); Metric m1 = new Metric("field2", List.of(MetricStat.VALUE_COUNT, MetricStat.SUM)); List dims = List.of(d1, d2); List metrics = List.of(m1); diff --git a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java index 8ec34b3eb660c..333cdbcab05c5 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java @@ -1085,6 +1085,9 @@ private XContentBuilder getInvalidMapping( b.startObject(); b.field("name", "keyword1"); b.endObject(); + b.startObject(); + b.field("name", "ip1"); + b.endObject(); } b.endArray(); b.startArray("metrics"); @@ -1117,7 +1120,7 @@ private XContentBuilder getInvalidMapping( if (!invalidDimType) { b.field("type", "integer"); } else { - b.field("type", "ip"); + b.field("type", "wildcard"); } b.endObject(); b.startObject("metric_field"); @@ -1130,6 +1133,9 @@ private XContentBuilder getInvalidMapping( b.startObject("keyword1"); b.field("type", "keyword"); b.endObject(); + b.startObject("ip1"); + b.field("type", "ip"); + b.endObject(); b.endObject(); }); } From f4fd707469d49f9de3f76cbde60567e36ef5f7bd Mon Sep 17 00:00:00 2001 From: Andrew Ross Date: Thu, 19 Dec 2024 15:35:13 -0600 Subject: [PATCH 04/27] Remove the events-correlation-engine plugin (#16885) We [reached agreement][1] to move this plugin to a separate repository. The implementation was never completed here, and this code was never backported to any release, so it is safe to remove. 
[1]: https://github.com/opensearch-project/OpenSearch/pull/7771#issuecomment-2193974273 Signed-off-by: Andrew Ross --- gradle/missing-javadoc.gradle | 1 - .../events-correlation-engine/build.gradle | 21 - .../EventsCorrelationPluginTransportIT.java | 177 -------- .../CorrelationVectorsEngineIT.java | 312 -------------- .../EventsCorrelationPluginRestIT.java | 154 ------- .../correlation/EventsCorrelationPlugin.java | 142 ------- .../core/index/CorrelationParamsContext.java | 148 ------- .../correlation/core/index/VectorField.java | 51 --- .../BasePerFieldCorrelationVectorsFormat.java | 104 ----- .../index/codec/CorrelationCodecService.java | 38 -- .../index/codec/CorrelationCodecVersion.java | 103 ----- .../correlation990/CorrelationCodec.java | 46 -- .../PerFieldCorrelationVectorsFormat.java | 35 -- .../codec/correlation990/package-info.java | 12 - .../core/index/codec/package-info.java | 12 - .../mapper/CorrelationVectorFieldMapper.java | 173 -------- .../core/index/mapper/VectorFieldMapper.java | 399 ------------------ .../core/index/mapper/package-info.java | 12 - .../correlation/core/index/package-info.java | 12 - .../index/query/CorrelationQueryBuilder.java | 332 --------------- .../index/query/CorrelationQueryFactory.java | 142 ------- .../core/index/query/package-info.java | 12 - .../plugin/correlation/package-info.java | 12 - .../action/IndexCorrelationRuleAction.java | 32 -- .../action/IndexCorrelationRuleRequest.java | 101 ----- .../action/IndexCorrelationRuleResponse.java | 94 ----- .../rules/action/package-info.java | 12 - .../rules/model/CorrelationQuery.java | 197 --------- .../rules/model/CorrelationRule.java | 244 ----------- .../correlation/rules/model/package-info.java | 12 - .../RestIndexCorrelationRuleAction.java | 111 ----- .../rules/resthandler/package-info.java | 12 - .../TransportIndexCorrelationRuleAction.java | 234 ---------- .../rules/transport/package-info.java | 12 - .../settings/EventsCorrelationSettings.java | 47 --- 
.../correlation/settings/package-info.java | 12 - .../utils/CorrelationRuleIndices.java | 83 ---- .../plugin/correlation/utils/IndexUtils.java | 139 ------ .../correlation/utils/package-info.java | 12 - .../services/org.apache.lucene.codecs.Codec | 1 - .../resources/mappings/correlation-rules.json | 60 --- .../EventsCorrelationPluginTests.java | 19 - .../index/CorrelationParamsContextTests.java | 170 -------- .../core/index/VectorFieldTests.java | 83 ---- .../correlation990/CorrelationCodecTests.java | 121 ------ .../CorrelationVectorFieldMapperTests.java | 310 -------------- .../query/CorrelationQueryBuilderTests.java | 269 ------------ .../EventsCorrelationSettingsTests.java | 58 --- 48 files changed, 4895 deletions(-) delete mode 100644 plugins/events-correlation-engine/build.gradle delete mode 100644 plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java delete mode 100644 plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java delete mode 100644 plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java delete mode 100644 
plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java 
delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java delete mode 100644 plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java delete mode 100644 
plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java delete mode 100644 plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec delete mode 100644 plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java delete mode 100644 plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java diff --git a/gradle/missing-javadoc.gradle b/gradle/missing-javadoc.gradle index 751da941d25dd..5a98a60e806ea 100644 --- a/gradle/missing-javadoc.gradle +++ b/gradle/missing-javadoc.gradle @@ -170,7 +170,6 @@ configure([ project(":libs:opensearch-common"), project(":libs:opensearch-core"), project(":libs:opensearch-compress"), - project(":plugins:events-correlation-engine"), project(":server") ]) { project.tasks.withType(MissingJavadocTask) { diff --git a/plugins/events-correlation-engine/build.gradle b/plugins/events-correlation-engine/build.gradle deleted file mode 100644 index c3eff30012b1d..0000000000000 --- 
a/plugins/events-correlation-engine/build.gradle +++ /dev/null @@ -1,21 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -apply plugin: 'opensearch.java-rest-test' -apply plugin: 'opensearch.internal-cluster-test' - -opensearchplugin { - description 'OpenSearch Events Correlation Engine.' - classname 'org.opensearch.plugin.correlation.EventsCorrelationPlugin' -} - -dependencies { -} diff --git a/plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java b/plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java deleted file mode 100644 index 028848a91213e..0000000000000 --- a/plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation; - -import org.apache.lucene.search.join.ScoreMode; -import org.opensearch.action.admin.cluster.node.info.NodeInfo; -import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.index.query.NestedQueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleRequest; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleResponse; -import org.opensearch.plugin.correlation.rules.model.CorrelationQuery; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.plugins.Plugin; -import org.opensearch.plugins.PluginInfo; -import org.opensearch.rest.RestRequest; -import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.test.OpenSearchIntegTestCase; -import org.junit.Assert; - -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * Transport Action tests for events-correlation-plugin - */ -public class EventsCorrelationPluginTransportIT extends OpenSearchIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(EventsCorrelationPlugin.class); - } - - /** - * test events-correlation-plugin is installed - */ - public void testPluginsAreInstalled() { - NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); - 
nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName()); - NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet(); - List pluginInfos = nodesInfoResponse.getNodes() - .stream() - .flatMap( - (Function>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream() - ) - .collect(Collectors.toList()); - Assert.assertTrue( - pluginInfos.stream() - .anyMatch(pluginInfo -> pluginInfo.getName().equals("org.opensearch.plugin.correlation.EventsCorrelationPlugin")) - ); - } - - /** - * test creating a correlation rule - * @throws Exception Exception - */ - public void testCreatingACorrelationRule() throws Exception { - List correlationQueries = Arrays.asList( - new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", "@timestamp", List.of("s3")), - new CorrelationQuery("app_logs", "keywords:PermissionDenied", "@timestamp", List.of("others_application")) - ); - CorrelationRule correlationRule = new CorrelationRule("s3 to app logs", correlationQueries); - IndexCorrelationRuleRequest request = new IndexCorrelationRuleRequest(correlationRule, RestRequest.Method.POST); - - IndexCorrelationRuleResponse response = client().execute(IndexCorrelationRuleAction.INSTANCE, request).get(); - Assert.assertEquals(RestStatus.CREATED, response.getStatus()); - - NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery( - "correlate", - QueryBuilders.matchQuery("correlate.index", "s3_access_logs"), - ScoreMode.None - ); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(queryBuilder); - searchSourceBuilder.fetchSource(true); - - SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX); - searchRequest.source(searchSourceBuilder); - - SearchResponse searchResponse = client().search(searchRequest).get(); - Assert.assertEquals(1L, 
searchResponse.getHits().getTotalHits().value); - } - - /** - * test filtering correlation rules - * @throws Exception Exception - */ - public void testFilteringCorrelationRules() throws Exception { - List correlationQueries1 = Arrays.asList( - new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", "@timestamp", List.of("s3")), - new CorrelationQuery("app_logs", "keywords:PermissionDenied", "@timestamp", List.of("others_application")) - ); - CorrelationRule correlationRule1 = new CorrelationRule("s3 to app logs", correlationQueries1); - IndexCorrelationRuleRequest request1 = new IndexCorrelationRuleRequest(correlationRule1, RestRequest.Method.POST); - client().execute(IndexCorrelationRuleAction.INSTANCE, request1).get(); - - List correlationQueries2 = Arrays.asList( - new CorrelationQuery("windows", "host.hostname:EC2AMAZ*", "@timestamp", List.of("windows")), - new CorrelationQuery("app_logs", "endpoint:/customer_records.txt", "@timestamp", List.of("others_application")) - ); - CorrelationRule correlationRule2 = new CorrelationRule("windows to app logs", correlationQueries2); - IndexCorrelationRuleRequest request2 = new IndexCorrelationRuleRequest(correlationRule2, RestRequest.Method.POST); - client().execute(IndexCorrelationRuleAction.INSTANCE, request2).get(); - - NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery( - "correlate", - QueryBuilders.matchQuery("correlate.index", "s3_access_logs"), - ScoreMode.None - ); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(queryBuilder); - searchSourceBuilder.fetchSource(true); - - SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX); - searchRequest.source(searchSourceBuilder); - - SearchResponse searchResponse = client().search(searchRequest).get(); - Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value); - } - - /** - * test creating a correlation rule with 
no timestamp field - * @throws Exception Exception - */ - @SuppressWarnings("unchecked") - public void testCreatingACorrelationRuleWithNoTimestampField() throws Exception { - List correlationQueries = Arrays.asList( - new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", null, List.of("s3")), - new CorrelationQuery("app_logs", "keywords:PermissionDenied", null, List.of("others_application")) - ); - CorrelationRule correlationRule = new CorrelationRule("s3 to app logs", correlationQueries); - IndexCorrelationRuleRequest request = new IndexCorrelationRuleRequest(correlationRule, RestRequest.Method.POST); - - IndexCorrelationRuleResponse response = client().execute(IndexCorrelationRuleAction.INSTANCE, request).get(); - Assert.assertEquals(RestStatus.CREATED, response.getStatus()); - - NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery( - "correlate", - QueryBuilders.matchQuery("correlate.index", "s3_access_logs"), - ScoreMode.None - ); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.query(queryBuilder); - searchSourceBuilder.fetchSource(true); - - SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX); - searchRequest.source(searchSourceBuilder); - - SearchResponse searchResponse = client().search(searchRequest).get(); - Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value); - Assert.assertEquals( - "_timestamp", - ((List>) (searchResponse.getHits().getHits()[0].getSourceAsMap().get("correlate"))).get(0) - .get("timestampField") - ); - } -} diff --git a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java deleted file mode 100644 index 414fe1948f053..0000000000000 --- 
a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java +++ /dev/null @@ -1,312 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation; - -import org.apache.hc.core5.http.Header; -import org.apache.hc.core5.http.HttpEntity; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.client.Request; -import org.opensearch.client.RequestOptions; -import org.opensearch.client.Response; -import org.opensearch.client.ResponseException; -import org.opensearch.client.RestClient; -import org.opensearch.client.WarningsHandler; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.core.common.Strings; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.MediaTypeRegistry; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.IndexSettings; -import org.opensearch.test.rest.OpenSearchRestTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * Correlation Vectors Engine e2e tests - */ -public class CorrelationVectorsEngineIT extends OpenSearchRestTestCase { - - private static final int DIMENSION = 4; - private static final String PROPERTIES_FIELD_NAME = "properties"; - private static final String TYPE_FIELD_NAME = "type"; - private static final String CORRELATION_VECTOR_TYPE = "correlation_vector"; - private static final String DIMENSION_FIELD_NAME = "dimension"; - private static final int M = 16; - private static final int 
EF_CONSTRUCTION = 128; - private static final String INDEX_NAME = "test-index-1"; - private static final Float[][] TEST_VECTORS = new Float[][] { - { 1.0f, 1.0f, 1.0f, 1.0f }, - { 2.0f, 2.0f, 2.0f, 2.0f }, - { 3.0f, 3.0f, 3.0f, 3.0f } }; - private static final float[][] TEST_QUERY_VECTORS = new float[][] { - { 1.0f, 1.0f, 1.0f, 1.0f }, - { 2.0f, 2.0f, 2.0f, 2.0f }, - { 3.0f, 3.0f, 3.0f, 3.0f } }; - private static final Map> VECTOR_SIMILARITY_TO_SCORE = Map.of( - VectorSimilarityFunction.EUCLIDEAN, - (similarity) -> 1 / (1 + similarity), - VectorSimilarityFunction.DOT_PRODUCT, - (similarity) -> (1 + similarity) / 2, - VectorSimilarityFunction.COSINE, - (similarity) -> (1 + similarity) / 2 - ); - - /** - * test the e2e storage and query layer of events-correlation-engine - * @throws IOException IOException - */ - @SuppressWarnings("unchecked") - public void testQuery() throws IOException { - String textField = "text-field"; - String luceneField = "lucene-field"; - XContentBuilder builder = XContentFactory.jsonBuilder() - .startObject() - .startObject(PROPERTIES_FIELD_NAME) - .startObject(textField) - .field(TYPE_FIELD_NAME, "text") - .endObject() - .startObject(luceneField) - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, DIMENSION) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .startObject("parameters") - .field("m", M) - .field("ef_construction", EF_CONSTRUCTION) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); - - String mapping = builder.toString(); - createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings()); - - for (int idx = 0; idx < TEST_VECTORS.length; ++idx) { - addCorrelationDoc( - INDEX_NAME, - String.valueOf(idx + 1), - List.of(textField, luceneField), - List.of(java.util.UUID.randomUUID().toString(), TEST_VECTORS[idx]) - ); - } - refreshAllIndices(); - 
Assert.assertEquals(TEST_VECTORS.length, getDocCount(INDEX_NAME)); - - int k = 2; - for (float[] query : TEST_QUERY_VECTORS) { - - String correlationQuery = "{\n" - + " \"query\": {\n" - + " \"correlation\": {\n" - + " \"lucene-field\": {\n" - + " \"vector\": \n" - + Arrays.toString(query) - + " ,\n" - + " \"k\": 2,\n" - + " \"boost\": 1\n" - + " }\n" - + " }\n" - + " }\n" - + "}"; - - Response response = searchCorrelationIndex(INDEX_NAME, correlationQuery, k); - Map responseBody = entityAsMap(response); - Assert.assertEquals(2, ((List) ((Map) responseBody.get("hits")).get("hits")).size()); - @SuppressWarnings("unchecked") - double actualScore1 = Double.parseDouble( - ((List>) ((Map) responseBody.get("hits")).get("hits")).get(0).get("_score").toString() - ); - @SuppressWarnings("unchecked") - double actualScore2 = Double.parseDouble( - ((List>) ((Map) responseBody.get("hits")).get("hits")).get(1).get("_score").toString() - ); - @SuppressWarnings("unchecked") - List hit1 = ((Map>) ((List>) ((Map) responseBody.get("hits")) - .get("hits")).get(0).get("_source")).get(luceneField).stream().map(Double::floatValue).collect(Collectors.toList()); - float[] resultVector1 = new float[hit1.size()]; - for (int i = 0; i < hit1.size(); ++i) { - resultVector1[i] = hit1.get(i); - } - - @SuppressWarnings("unchecked") - List hit2 = ((Map>) ((List>) ((Map) responseBody.get("hits")) - .get("hits")).get(1).get("_source")).get(luceneField).stream().map(Double::floatValue).collect(Collectors.toList()); - float[] resultVector2 = new float[hit2.size()]; - for (int i = 0; i < hit2.size(); ++i) { - resultVector2[i] = hit2.get(i); - } - - double rawScore1 = VectorSimilarityFunction.EUCLIDEAN.compare(resultVector1, query); - Assert.assertEquals(rawScore1, actualScore1, 0.0001); - double rawScore2 = VectorSimilarityFunction.EUCLIDEAN.compare(resultVector2, query); - Assert.assertEquals(rawScore2, actualScore2, 0.0001); - } - } - - /** - * unhappy test for the e2e storage and query layer of 
events-correlation-engine with no index exist - */ - public void testQueryWithNoIndexExist() { - float[] query = new float[] { 1.0f, 1.0f, 1.0f, 1.0f }; - String correlationQuery = "{\n" - + " \"query\": {\n" - + " \"correlation\": {\n" - + " \"lucene-field\": {\n" - + " \"vector\": \n" - + Arrays.toString(query) - + " ,\n" - + " \"k\": 2,\n" - + " \"boost\": 1\n" - + " }\n" - + " }\n" - + " }\n" - + "}"; - Exception ex = assertThrows(ResponseException.class, () -> { searchCorrelationIndex(INDEX_NAME, correlationQuery, 2); }); - String expectedMessage = String.format(Locale.ROOT, "no such index [%s]", INDEX_NAME); - String actualMessage = ex.getMessage(); - Assert.assertTrue(actualMessage.contains(expectedMessage)); - } - - /** - * unhappy test for the e2e storage and query layer of events-correlation-engine with wrong mapping - */ - public void testQueryWithWrongMapping() throws IOException { - String textField = "text-field"; - String luceneField = "lucene-field"; - XContentBuilder builder = XContentFactory.jsonBuilder() - .startObject() - .startObject(PROPERTIES_FIELD_NAME) - .startObject(textField) - .field(TYPE_FIELD_NAME, "text") - .endObject() - .startObject(luceneField) - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field("test", DIMENSION) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .startObject("parameters") - .field("m", M) - .field("ef_construction", EF_CONSTRUCTION) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); - - String mapping = builder.toString(); - Exception ex = assertThrows(ResponseException.class, () -> { - createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings()); - }); - - String expectedMessage = String.format( - Locale.ROOT, - "unknown parameter [test] on mapper [%s] of type [correlation_vector]", - luceneField - ); - String actualMessage = ex.getMessage(); - 
Assert.assertTrue(actualMessage.contains(expectedMessage)); - } - - private String createTestIndexWithMappingJson(RestClient client, String index, String mapping, Settings settings) throws IOException { - Request request = new Request("PUT", "/" + index); - String entity = "{\"settings\": " + Strings.toString(MediaTypeRegistry.JSON, settings); - if (mapping != null) { - entity = entity + ",\"mappings\" : " + mapping; - } - - entity = entity + "}"; - if (!settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) { - expectSoftDeletesWarning(request, index); - } - - request.setJsonEntity(entity); - client.performRequest(request); - return index; - } - - private Settings getCorrelationDefaultIndexSettings() { - return Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).put("index.correlation", true).build(); - } - - private void addCorrelationDoc(String index, String docId, List fieldNames, List vectors) throws IOException { - Request request = new Request("POST", "/" + index + "/_doc/" + docId + "?refresh=true"); - - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - for (int i = 0; i < fieldNames.size(); i++) { - builder.field(fieldNames.get(i), vectors.get(i)); - } - builder.endObject(); - - request.setJsonEntity(builder.toString()); - Response response = client().performRequest(request); - assertEquals(request.getEndpoint() + ": failed", RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); - } - - private Response searchCorrelationIndex(String index, String correlationQuery, int resultSize) throws IOException { - Request request = new Request("POST", "/" + index + "/_search"); - - request.addParameter("size", Integer.toString(resultSize)); - request.addParameter("explain", Boolean.toString(true)); - request.addParameter("search_type", "query_then_fetch"); - request.setJsonEntity(correlationQuery); - - Response response = client().performRequest(request); - 
Assert.assertEquals("Search failed", RestStatus.OK, restStatus(response)); - return response; - } - - private int getDocCount(String index) throws IOException { - Response response = makeRequest( - client(), - "GET", - String.format(Locale.getDefault(), "/%s/_count", index), - Collections.emptyMap(), - null - ); - Assert.assertEquals(RestStatus.OK, restStatus(response)); - return Integer.parseInt(entityAsMap(response).get("count").toString()); - } - - private Response makeRequest( - RestClient client, - String method, - String endpoint, - Map params, - HttpEntity entity, - Header... headers - ) throws IOException { - Request request = new Request(method, endpoint); - RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); - options.setWarningsHandler(WarningsHandler.PERMISSIVE); - - for (Header header : headers) { - options.addHeader(header.getName(), header.getValue()); - } - request.setOptions(options.build()); - request.addParameters(params); - if (entity != null) { - request.setEntity(entity); - } - return client.performRequest(request); - } - - private RestStatus restStatus(Response response) { - return RestStatus.fromCode(response.getStatusLine().getStatusCode()); - } -} diff --git a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java deleted file mode 100644 index 3791a5cdf5db0..0000000000000 --- a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation; - -import org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Request; -import org.opensearch.client.Response; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.test.rest.OpenSearchRestTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -/** - * Rest Action tests for events-correlation-plugin - */ -public class EventsCorrelationPluginRestIT extends OpenSearchRestTestCase { - - /** - * test events-correlation-plugin is installed - * @throws IOException IOException - */ - @SuppressWarnings("unchecked") - public void testPluginsAreInstalled() throws IOException { - Request request = new Request("GET", "/_cat/plugins?s=component&h=name,component,version,description&format=json"); - Response response = client().performRequest(request); - List pluginsList = JsonXContent.jsonXContent.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - response.getEntity().getContent() - ).list(); - Assert.assertTrue( - pluginsList.stream() - .map(o -> (Map) o) - .anyMatch(plugin -> plugin.get("component").equals("events-correlation-engine")) - ); - } - - /** - * test creating a correlation rule - * @throws IOException IOException - */ - public void testCreatingACorrelationRule() throws IOException { - Request request = new Request("POST", "/_correlation/rules"); - request.setJsonEntity(sampleCorrelationRule()); - Response response = client().performRequest(request); - - Assert.assertEquals(201, response.getStatusLine().getStatusCode()); - - Map responseMap = entityAsMap(response); - String id = responseMap.get("_id").toString(); - - request = new Request("POST", "/.opensearch-correlation-rules-config/_search"); - request.setJsonEntity(matchIdQuery(id)); - response = 
client().performRequest(request); - - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - SearchResponse searchResponse = SearchResponse.fromXContent( - createParser(JsonXContent.jsonXContent, response.getEntity().getContent()) - ); - Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value); - } - - /** - * test creating a correlation rule with no timestamp field - * @throws IOException IOException - */ - @SuppressWarnings("unchecked") - public void testCreatingACorrelationRuleWithNoTimestampField() throws IOException { - Request request = new Request("POST", "/_correlation/rules"); - request.setJsonEntity(sampleCorrelationRuleWithNoTimestamp()); - Response response = client().performRequest(request); - - Assert.assertEquals(201, response.getStatusLine().getStatusCode()); - - Map responseMap = entityAsMap(response); - String id = responseMap.get("_id").toString(); - - request = new Request("POST", "/.opensearch-correlation-rules-config/_search"); - request.setJsonEntity(matchIdQuery(id)); - response = client().performRequest(request); - - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - SearchResponse searchResponse = SearchResponse.fromXContent( - createParser(JsonXContent.jsonXContent, response.getEntity().getContent()) - ); - Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value); - Assert.assertEquals( - "_timestamp", - ((List>) (searchResponse.getHits().getHits()[0].getSourceAsMap().get("correlate"))).get(0) - .get("timestampField") - ); - } - - private String sampleCorrelationRule() { - return "{\n" - + " \"name\": \"s3 to app logs\",\n" - + " \"correlate\": [\n" - + " {\n" - + " \"index\": \"s3_access_logs\",\n" - + " \"query\": \"aws.cloudtrail.eventName:ReplicateObject\",\n" - + " \"timestampField\": \"@timestamp\",\n" - + " \"tags\": [\n" - + " \"s3\"\n" - + " ]\n" - + " },\n" - + " {\n" - + " \"index\": \"app_logs\",\n" - + " \"query\": \"keywords:PermissionDenied\",\n" - + " 
\"timestampField\": \"@timestamp\",\n" - + " \"tags\": [\n" - + " \"others_application\"\n" - + " ]\n" - + " }\n" - + " ]\n" - + "}"; - } - - private String sampleCorrelationRuleWithNoTimestamp() { - return "{\n" - + " \"name\": \"s3 to app logs\",\n" - + " \"correlate\": [\n" - + " {\n" - + " \"index\": \"s3_access_logs\",\n" - + " \"query\": \"aws.cloudtrail.eventName:ReplicateObject\",\n" - + " \"tags\": [\n" - + " \"s3\"\n" - + " ]\n" - + " },\n" - + " {\n" - + " \"index\": \"app_logs\",\n" - + " \"query\": \"keywords:PermissionDenied\",\n" - + " \"tags\": [\n" - + " \"others_application\"\n" - + " ]\n" - + " }\n" - + " ]\n" - + "}"; - } - - private String matchIdQuery(String id) { - return "{\n" + " \"query\" : {\n" + " \"match\":{\n" + " \"_id\": \"" + id + "\"\n" + " }\n" + " }\n" + "}"; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java deleted file mode 100644 index 9637042974d03..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation; - -import org.opensearch.action.ActionRequest; -import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.settings.IndexScopedSettings; -import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.settings.SettingsFilter; -import org.opensearch.core.action.ActionResponse; -import org.opensearch.core.common.io.stream.NamedWriteableRegistry; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.env.Environment; -import org.opensearch.env.NodeEnvironment; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.codec.CodecServiceFactory; -import org.opensearch.index.mapper.Mapper; -import org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecService; -import org.opensearch.plugin.correlation.core.index.mapper.CorrelationVectorFieldMapper; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; -import org.opensearch.plugin.correlation.core.index.query.CorrelationQueryBuilder; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.resthandler.RestIndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.transport.TransportIndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.settings.EventsCorrelationSettings; -import org.opensearch.plugin.correlation.utils.CorrelationRuleIndices; -import org.opensearch.plugins.ActionPlugin; -import org.opensearch.plugins.EnginePlugin; -import org.opensearch.plugins.MapperPlugin; -import org.opensearch.plugins.Plugin; -import org.opensearch.plugins.SearchPlugin; -import 
org.opensearch.repositories.RepositoriesService; -import org.opensearch.rest.RestController; -import org.opensearch.rest.RestHandler; -import org.opensearch.script.ScriptService; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.watcher.ResourceWatcherService; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.function.Supplier; - -/** - * Plugin class for events-correlation-engine - */ -public class EventsCorrelationPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin { - - /** - * events-correlation-engine base uri - */ - public static final String PLUGINS_BASE_URI = "/_correlation"; - /** - * events-correlation-engine rules uri - */ - public static final String CORRELATION_RULES_BASE_URI = PLUGINS_BASE_URI + "/rules"; - - private CorrelationRuleIndices correlationRuleIndices; - - /** - * Default constructor - */ - public EventsCorrelationPlugin() {} - - @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier - ) { - correlationRuleIndices = new CorrelationRuleIndices(client, clusterService); - return List.of(correlationRuleIndices); - } - - @Override - public List getRestHandlers( - Settings settings, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster - ) { - return List.of(new RestIndexCorrelationRuleAction()); - } - - @Override - public 
Map getMappers() { - return Collections.singletonMap(CorrelationVectorFieldMapper.CONTENT_TYPE, new VectorFieldMapper.TypeParser()); - } - - @Override - public Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { - if (indexSettings.getValue(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING)) { - return Optional.of(CorrelationCodecService::new); - } - return Optional.empty(); - } - - @Override - public List> getQueries() { - return Collections.singletonList( - new QuerySpec<>( - CorrelationQueryBuilder.NAME_FIELD.getPreferredName(), - CorrelationQueryBuilder::new, - CorrelationQueryBuilder::parse - ) - ); - } - - @Override - public List> getActions() { - return List.of(new ActionPlugin.ActionHandler<>(IndexCorrelationRuleAction.INSTANCE, TransportIndexCorrelationRuleAction.class)); - } - - @Override - public List> getSettings() { - return List.of(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING, EventsCorrelationSettings.CORRELATION_TIME_WINDOW); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java deleted file mode 100644 index fef9200a73091..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ToXContentFragment; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.mapper.MapperParsingException; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -/** - * Defines vector similarity function, m and ef_construction hyper parameters field mappings for correlation_vector type. - * - * @opensearch.internal - */ -public class CorrelationParamsContext implements ToXContentFragment, Writeable { - - /** - * Vector Similarity Function field - */ - public static final String VECTOR_SIMILARITY_FUNCTION = "similarityFunction"; - /** - * Parameters field to define m and ef_construction - */ - public static final String PARAMETERS = "parameters"; - - private final VectorSimilarityFunction similarityFunction; - private final Map parameters; - - /** - * Parameterized ctor for CorrelationParamsContext - * @param similarityFunction Vector Similarity Function - * @param parameters Parameters to define m and ef_construction - */ - public CorrelationParamsContext(VectorSimilarityFunction similarityFunction, Map parameters) { - this.similarityFunction = similarityFunction; - this.parameters = parameters; - } - - /** - * Parameterized ctor for CorrelationParamsContext - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationParamsContext(StreamInput sin) throws IOException { - this.similarityFunction = VectorSimilarityFunction.valueOf(sin.readString()); - if (sin.available() > 0) { - this.parameters = sin.readMap(); - } else { - this.parameters = null; - } - } - - /** - * Parse into CorrelationParamsContext - * @param in Object - * 
@return CorrelationParamsContext - */ - public static CorrelationParamsContext parse(Object in) { - if (!(in instanceof Map)) { - throw new MapperParsingException("Unable to parse CorrelationParamsContext"); - } - - @SuppressWarnings("unchecked") - Map contextMap = (Map) in; - VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.EUCLIDEAN; - Map parameters = new HashMap<>(); - - if (contextMap.containsKey(VECTOR_SIMILARITY_FUNCTION)) { - Object value = contextMap.get(VECTOR_SIMILARITY_FUNCTION); - - if (value != null && !(value instanceof String)) { - throw new MapperParsingException(String.format(Locale.getDefault(), "%s must be a string", VECTOR_SIMILARITY_FUNCTION)); - } - - try { - similarityFunction = VectorSimilarityFunction.valueOf((String) value); - } catch (IllegalArgumentException ex) { - throw new MapperParsingException(String.format(Locale.getDefault(), "Invalid %s: %s", VECTOR_SIMILARITY_FUNCTION, value)); - } - } - if (contextMap.containsKey(PARAMETERS)) { - Object value = contextMap.get(PARAMETERS); - if (!(value instanceof Map)) { - throw new MapperParsingException("Unable to parse parameters for Correlation context"); - } - - @SuppressWarnings("unchecked") - Map valueMap = (Map) value; - parameters.putAll(valueMap); - } - return new CorrelationParamsContext(similarityFunction, parameters); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(VECTOR_SIMILARITY_FUNCTION, similarityFunction.name()); - if (params == null) { - builder.field(PARAMETERS, (String) null); - } else { - builder.startObject(PARAMETERS); - for (Map.Entry parameter : parameters.entrySet()) { - builder.field(parameter.getKey(), parameter.getValue()); - } - builder.endObject(); - } - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(similarityFunction.name()); - if 
(this.parameters != null) { - out.writeMap(parameters); - } - } - - /** - * get Vector Similarity Function - * @return Vector Similarity Function - */ - public VectorSimilarityFunction getSimilarityFunction() { - return similarityFunction; - } - - /** - * Get Parameters to define m and ef_construction - * @return Parameters to define m and ef_construction - */ - public Map getParameters() { - return parameters; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java deleted file mode 100644 index 61efd6b9a87ae..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexableFieldType; -import org.apache.lucene.util.BytesRef; -import org.opensearch.common.io.stream.BytesStreamOutput; - -import java.io.IOException; - -/** - * Generic Vector Field defining a correlation vector name, float array. 
- * - * @opensearch.internal - */ -public class VectorField extends Field { - - /** - * Parameterized ctor for VectorField - * @param name name of the field - * @param value float array value for the field - * @param type type of the field - */ - public VectorField(String name, float[] value, IndexableFieldType type) { - super(name, new BytesRef(), type); - try { - final byte[] floatToByte = floatToByteArray(value); - this.setBytesValue(floatToByte); - } catch (IOException ex) { - throw new RuntimeException(ex); - } - } - - /** - * converts float array based vector to byte array. - * @param input float array - * @return byte array - */ - protected static byte[] floatToByteArray(float[] input) throws IOException { - BytesStreamOutput objectStream = new BytesStreamOutput(); - objectStream.writeFloatArray(input); - return objectStream.bytes().toBytesRef().bytes; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java deleted file mode 100644 index 00b55eb75995c..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.codec; - -import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.mapper.CorrelationVectorFieldMapper; - -import java.util.Locale; -import java.util.Map; -import java.util.Optional; -import java.util.function.BiFunction; -import java.util.function.Supplier; - -/** - * Class to define the hyper-parameters m and ef_construction for insert and store of correlation vectors into HNSW graphs based lucene index. - * - * @opensearch.internal - */ -public abstract class BasePerFieldCorrelationVectorsFormat extends PerFieldKnnVectorsFormat { - /** - * the hyper-parameters for constructing HNSW graphs. - * HnswGraph.html - */ - public static final String METHOD_PARAMETER_M = "m"; - /** - * the hyper-parameters for constructing HNSW graphs. - * HnswGraph.html - */ - public static final String METHOD_PARAMETER_EF_CONSTRUCTION = "ef_construction"; - - private final Optional mapperService; - private final int defaultMaxConnections; - private final int defaultBeamWidth; - private final Supplier defaultFormatSupplier; - private final BiFunction formatSupplier; - - /** - * Parameterized ctor of BasePerFieldCorrelationVectorsFormat - * @param mapperService mapper service - * @param defaultMaxConnections default m - * @param defaultBeamWidth default ef_construction - * @param defaultFormatSupplier default format supplier - * @param formatSupplier format supplier - */ - public BasePerFieldCorrelationVectorsFormat( - Optional mapperService, - int defaultMaxConnections, - int defaultBeamWidth, - Supplier defaultFormatSupplier, - BiFunction formatSupplier - ) { - this.mapperService = mapperService; - this.defaultMaxConnections = defaultMaxConnections; - this.defaultBeamWidth = defaultBeamWidth; - this.defaultFormatSupplier = defaultFormatSupplier; - 
this.formatSupplier = formatSupplier; - } - - @Override - public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - if (!isCorrelationVectorFieldType(field)) { - return defaultFormatSupplier.get(); - } - - var type = (CorrelationVectorFieldMapper.CorrelationVectorFieldType) mapperService.orElseThrow( - () -> new IllegalArgumentException( - String.format(Locale.getDefault(), "Cannot read field type for field [%s] because mapper service is not available", field) - ) - ).fieldType(field); - - var params = type.getCorrelationParams().getParameters(); - int maxConnections = getMaxConnections(params); - int beamWidth = getBeamWidth(params); - - return formatSupplier.apply(maxConnections, beamWidth); - } - - private boolean isCorrelationVectorFieldType(final String field) { - return mapperService.isPresent() - && mapperService.get().fieldType(field) instanceof CorrelationVectorFieldMapper.CorrelationVectorFieldType; - } - - private int getMaxConnections(final Map params) { - if (params != null && params.containsKey(METHOD_PARAMETER_M)) { - return (int) params.get(METHOD_PARAMETER_M); - } - return defaultMaxConnections; - } - - private int getBeamWidth(final Map params) { - if (params != null && params.containsKey(METHOD_PARAMETER_EF_CONSTRUCTION)) { - return (int) params.get(METHOD_PARAMETER_EF_CONSTRUCTION); - } - return defaultBeamWidth; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java deleted file mode 100644 index 09d5e1d2c19e3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be 
licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec; - -import org.apache.lucene.codecs.Codec; -import org.opensearch.index.codec.CodecService; -import org.opensearch.index.codec.CodecServiceConfig; -import org.opensearch.index.mapper.MapperService; - -/** - * custom Correlation Codec Service - * - * @opensearch.internal - */ -public class CorrelationCodecService extends CodecService { - - private final MapperService mapperService; - - /** - * Parameterized ctor for CorrelationCodecService - * @param codecServiceConfig Generic codec service config - */ - public CorrelationCodecService(CodecServiceConfig codecServiceConfig) { - super(codecServiceConfig.getMapperService(), codecServiceConfig.getIndexSettings(), codecServiceConfig.getLogger()); - mapperService = codecServiceConfig.getMapperService(); - } - - @Override - public Codec codec(String name) { - return CorrelationCodecVersion.current().getCorrelationCodecSupplier().apply(super.codec(name), mapperService); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java deleted file mode 100644 index 9dbb695f14b78..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.codec; - -import org.apache.lucene.backward_codecs.lucene99.Lucene99Codec; -import org.apache.lucene.codecs.Codec; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec; -import org.opensearch.plugin.correlation.core.index.codec.correlation990.PerFieldCorrelationVectorsFormat; - -import java.util.Optional; -import java.util.function.BiFunction; -import java.util.function.Supplier; - -/** - * CorrelationCodecVersion enum - * - * @opensearch.internal - */ -public enum CorrelationCodecVersion { - V_9_9_0( - "CorrelationCodec", - new Lucene99Codec(), - new PerFieldCorrelationVectorsFormat(Optional.empty()), - (userCodec, mapperService) -> new CorrelationCodec(userCodec, new PerFieldCorrelationVectorsFormat(Optional.of(mapperService))), - CorrelationCodec::new - ); - - private static final CorrelationCodecVersion CURRENT = V_9_9_0; - private final String codecName; - private final Codec defaultCodecDelegate; - private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; - private final BiFunction correlationCodecSupplier; - private final Supplier defaultCorrelationCodecSupplier; - - CorrelationCodecVersion( - String codecName, - Codec defaultCodecDelegate, - PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat, - BiFunction correlationCodecSupplier, - Supplier defaultCorrelationCodecSupplier - ) { - this.codecName = codecName; - this.defaultCodecDelegate = defaultCodecDelegate; - this.perFieldCorrelationVectorsFormat = perFieldCorrelationVectorsFormat; - this.correlationCodecSupplier = correlationCodecSupplier; - this.defaultCorrelationCodecSupplier = defaultCorrelationCodecSupplier; - } - - /** - * get codec name - * @return codec name - */ - public String getCodecName() { - return codecName; - } - - /** - * get default codec delegate - * @return default codec delegate - */ - public Codec 
getDefaultCodecDelegate() { - return defaultCodecDelegate; - } - - /** - * get correlation vectors format - * @return correlation vectors format - */ - public PerFieldCorrelationVectorsFormat getPerFieldCorrelationVectorsFormat() { - return perFieldCorrelationVectorsFormat; - } - - /** - * get correlation codec supplier - * @return correlation codec supplier - */ - public BiFunction getCorrelationCodecSupplier() { - return correlationCodecSupplier; - } - - /** - * get default correlation codec supplier - * @return default correlation codec supplier - */ - public Supplier getDefaultCorrelationCodecSupplier() { - return defaultCorrelationCodecSupplier; - } - - /** - * static method to get correlation codec version - * @return correlation codec version - */ - public static final CorrelationCodecVersion current() { - return CURRENT; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java deleted file mode 100644 index 022972e2e06c3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.codec.correlation990; - -import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.KnnVectorsFormat; -import org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion; - -/** - * Correlation Codec class - * - * @opensearch.internal - */ -public class CorrelationCodec extends FilterCodec { - private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_9_0; - private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; - - /** - * ctor for CorrelationCodec - */ - public CorrelationCodec() { - this(VERSION.getDefaultCodecDelegate(), VERSION.getPerFieldCorrelationVectorsFormat()); - } - - /** - * Parameterized ctor for CorrelationCodec - * @param delegate codec delegate - * @param perFieldCorrelationVectorsFormat correlation vectors format - */ - public CorrelationCodec(Codec delegate, PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat) { - super(VERSION.getCodecName(), delegate); - this.perFieldCorrelationVectorsFormat = perFieldCorrelationVectorsFormat; - } - - @Override - public KnnVectorsFormat knnVectorsFormat() { - return perFieldCorrelationVectorsFormat; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java deleted file mode 100644 index 89cc0b614a1a5..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the 
Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec.correlation990; - -import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat; - -import java.util.Optional; - -/** - * Class to define the hyper-parameters m and ef_construction for insert and store of correlation vectors into HNSW graphs based lucene index. - */ -public class PerFieldCorrelationVectorsFormat extends BasePerFieldCorrelationVectorsFormat { - - /** - * Parameterized ctor for PerFieldCorrelationVectorsFormat - * @param mapperService mapper service - */ - public PerFieldCorrelationVectorsFormat(final Optional mapperService) { - super( - mapperService, - Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - Lucene99HnswVectorsFormat::new, - Lucene99HnswVectorsFormat::new - ); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java deleted file mode 100644 index fc2a9de58a73a..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/** - * custom Lucene9.5 codec package for events-correlation-engine - */ -package org.opensearch.plugin.correlation.core.index.codec.correlation990; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java deleted file mode 100644 index 862b7cd253f04..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * custom codec package for events-correlation-engine - */ -package org.opensearch.plugin.correlation.core.index.codec; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java deleted file mode 100644 index 18c9dd222e2cf..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.mapper; - -import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.KnnFloatVectorField; -import org.apache.lucene.document.StoredField; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.common.Explicit; -import org.opensearch.index.mapper.FieldMapper; -import org.opensearch.index.mapper.ParseContext; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.plugin.correlation.core.index.VectorField; - -import java.io.IOException; -import java.util.Locale; -import java.util.Optional; - -/** - * Field mapper for the correlation vector type - * - * @opensearch.internal - */ -public class CorrelationVectorFieldMapper extends VectorFieldMapper { - - private static final int LUCENE_MAX_DIMENSION = KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS; - - private final FieldType vectorFieldType; - - /** - * Parameterized ctor for CorrelationVectorFieldMapper - * @param input Object containing name of the field, type and other details. 
- */ - public CorrelationVectorFieldMapper(final CreateLuceneFieldMapperInput input) { - super( - input.getName(), - input.getMappedFieldType(), - input.getMultiFields(), - input.getCopyTo(), - input.getIgnoreMalformed(), - input.isStored(), - input.isHasDocValues() - ); - - this.correlationParams = input.getCorrelationParams(); - final VectorSimilarityFunction vectorSimilarityFunction = this.correlationParams.getSimilarityFunction(); - - final int dimension = input.getMappedFieldType().getDimension(); - if (dimension > LUCENE_MAX_DIMENSION) { - throw new IllegalArgumentException( - String.format( - Locale.ROOT, - "Dimension value cannot be greater than [%s] but got [%s] for vector [%s]", - LUCENE_MAX_DIMENSION, - dimension, - input.getName() - ) - ); - } - - this.fieldType = KnnFloatVectorField.createFieldType(dimension, vectorSimilarityFunction); - - if (this.hasDocValues) { - this.vectorFieldType = buildDocValuesFieldType(); - } else { - this.vectorFieldType = null; - } - } - - private static FieldType buildDocValuesFieldType() { - FieldType field = new FieldType(); - field.setDocValuesType(DocValuesType.BINARY); - field.freeze(); - return field; - } - - @Override - protected void parseCreateField(ParseContext context, int dimension) throws IOException { - Optional arrayOptional = getFloatsFromContext(context, dimension); - - if (arrayOptional.isEmpty()) { - return; - } - final float[] array = arrayOptional.get(); - - KnnFloatVectorField point = new KnnFloatVectorField(name(), array, fieldType); - - context.doc().add(point); - if (fieldType.stored()) { - context.doc().add(new StoredField(name(), point.toString())); - } - if (hasDocValues && vectorFieldType != null) { - context.doc().add(new VectorField(name(), array, vectorFieldType)); - } - context.path().remove(); - } - - static class CreateLuceneFieldMapperInput { - String name; - - CorrelationVectorFieldType mappedFieldType; - - FieldMapper.MultiFields multiFields; - - FieldMapper.CopyTo copyTo; - - Explicit 
ignoreMalformed; - boolean stored; - boolean hasDocValues; - - CorrelationParamsContext correlationParams; - - public CreateLuceneFieldMapperInput( - String name, - CorrelationVectorFieldType mappedFieldType, - FieldMapper.MultiFields multiFields, - FieldMapper.CopyTo copyTo, - Explicit ignoreMalformed, - boolean stored, - boolean hasDocValues, - CorrelationParamsContext correlationParams - ) { - this.name = name; - this.mappedFieldType = mappedFieldType; - this.multiFields = multiFields; - this.copyTo = copyTo; - this.ignoreMalformed = ignoreMalformed; - this.stored = stored; - this.hasDocValues = hasDocValues; - this.correlationParams = correlationParams; - } - - public String getName() { - return name; - } - - public CorrelationVectorFieldType getMappedFieldType() { - return mappedFieldType; - } - - public FieldMapper.MultiFields getMultiFields() { - return multiFields; - } - - public FieldMapper.CopyTo getCopyTo() { - return copyTo; - } - - public Explicit getIgnoreMalformed() { - return ignoreMalformed; - } - - public boolean isStored() { - return stored; - } - - public boolean isHasDocValues() { - return hasDocValues; - } - - public CorrelationParamsContext getCorrelationParams() { - return correlationParams; - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java deleted file mode 100644 index 5ac6d92792295..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java +++ /dev/null @@ -1,399 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.mapper; - -import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.Explicit; -import org.opensearch.common.xcontent.support.XContentMapValues; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.FieldMapper; -import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.Mapper; -import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.index.mapper.ParametrizedFieldMapper; -import org.opensearch.index.mapper.ParseContext; -import org.opensearch.index.mapper.TextSearchInfo; -import org.opensearch.index.mapper.ValueFetcher; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.QueryShardException; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.search.lookup.SearchLookup; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Optional; - -/** - * Parameterized field mapper for Correlation Vector type - * - * @opensearch.internal - */ -public abstract class VectorFieldMapper extends ParametrizedFieldMapper { - - /** - * name of Correlation Vector type - */ - public static final String CONTENT_TYPE = "correlation_vector"; - /** - * dimension of the correlation vectors - */ - public static final String DIMENSION = "dimension"; - /** - * context e.g. 
parameters and vector similarity function of Correlation Vector type - */ - public static final String CORRELATION_CONTEXT = "correlation_ctx"; - - private static VectorFieldMapper toType(FieldMapper in) { - return (VectorFieldMapper) in; - } - - /** - * definition of VectorFieldMapper.Builder - */ - public static class Builder extends ParametrizedFieldMapper.Builder { - protected Boolean ignoreMalformed; - - protected final Parameter stored = Parameter.boolParam("store", false, m -> toType(m).stored, false); - protected final Parameter hasDocValues = Parameter.boolParam("doc_values", false, m -> toType(m).hasDocValues, true); - protected final Parameter dimension = new Parameter<>(DIMENSION, false, () -> -1, (n, c, o) -> { - if (o == null) { - throw new IllegalArgumentException("Dimension cannot be null"); - } - int value; - try { - value = XContentMapValues.nodeIntegerValue(o); - } catch (Exception ex) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "Unable to parse [dimension] from provided value [%s] for vector [%s]", o, name) - ); - } - if (value <= 0) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "Dimension value must be greater than 0 for vector: %s", name) - ); - } - return value; - }, m -> toType(m).dimension); - - protected final Parameter correlationParamsContext = new Parameter<>( - CORRELATION_CONTEXT, - false, - () -> null, - (n, c, o) -> CorrelationParamsContext.parse(o), - m -> toType(m).correlationParams - ); - - protected final Parameter> meta = Parameter.metaParam(); - - /** - * Parameterized ctor for VectorFieldMapper.Builder - * @param name name - */ - public Builder(String name) { - super(name); - } - - @Override - protected List> getParameters() { - return Arrays.asList(stored, hasDocValues, dimension, meta, correlationParamsContext); - } - - protected Explicit ignoreMalformed(BuilderContext context) { - if (ignoreMalformed != null) { - return new Explicit<>(ignoreMalformed, true); - 
} - if (context.indexSettings() != null) { - return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); - } - return Defaults.IGNORE_MALFORMED; - } - - @Override - public ParametrizedFieldMapper build(BuilderContext context) { - final CorrelationParamsContext correlationParams = correlationParamsContext.getValue(); - final MultiFields multiFieldsBuilder = this.multiFieldsBuilder.build(this, context); - final CopyTo copyToBuilder = copyTo.build(); - final Explicit ignoreMalformed = ignoreMalformed(context); - final Map metaValue = meta.getValue(); - - final CorrelationVectorFieldType mappedFieldType = new CorrelationVectorFieldType( - buildFullName(context), - metaValue, - dimension.getValue(), - correlationParams - ); - - CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput createLuceneFieldMapperInput = - new CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput( - name, - mappedFieldType, - multiFieldsBuilder, - copyToBuilder, - ignoreMalformed, - stored.get(), - hasDocValues.get(), - correlationParams - ); - return new CorrelationVectorFieldMapper(createLuceneFieldMapperInput); - } - } - - /** - * deifintion of VectorFieldMapper.TypeParser - */ - public static class TypeParser implements Mapper.TypeParser { - - /** - * default constructor of VectorFieldMapper.TypeParser - */ - public TypeParser() {} - - @Override - public Mapper.Builder parse(String name, Map node, ParserContext context) throws MapperParsingException { - Builder builder = new VectorFieldMapper.Builder(name); - builder.parse(name, context, node); - - if (builder.dimension.getValue() == -1) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "Dimension value missing for vector: %s", name)); - } - return builder; - } - } - - /** - * deifintion of VectorFieldMapper.CorrelationVectorFieldType - */ - public static class CorrelationVectorFieldType extends MappedFieldType { - int dimension; - CorrelationParamsContext correlationParams; - - /** - * 
Parameterized ctor for VectorFieldMapper.CorrelationVectorFieldType - * @param name name of the field - * @param meta meta of the field - * @param dimension dimension of the field - */ - public CorrelationVectorFieldType(String name, Map meta, int dimension) { - this(name, meta, dimension, null); - } - - /** - * Parameterized ctor for VectorFieldMapper.CorrelationVectorFieldType - * @param name name of the field - * @param meta meta of the field - * @param dimension dimension of the field - * @param correlationParams correlation params for the field - */ - public CorrelationVectorFieldType( - String name, - Map meta, - int dimension, - CorrelationParamsContext correlationParams - ) { - super(name, false, false, true, TextSearchInfo.NONE, meta); - this.dimension = dimension; - this.correlationParams = correlationParams; - } - - @Override - public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String s) { - throw new UnsupportedOperationException("Correlation Vector do not support fields search"); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Query existsQuery(QueryShardContext context) { - return new FieldExistsQuery(name()); - } - - @Override - public Query termQuery(Object o, QueryShardContext context) { - throw new QueryShardException( - context, - String.format( - Locale.getDefault(), - "Correlation vector do not support exact searching, use Correlation queries instead: [%s]", - name() - ) - ); - } - - /** - * get dimension - * @return dimension - */ - public int getDimension() { - return dimension; - } - - /** - * get correlation params - * @return correlation params - */ - public CorrelationParamsContext getCorrelationParams() { - return correlationParams; - } - } - - protected Explicit ignoreMalformed; - protected boolean stored; - protected boolean hasDocValues; - protected Integer dimension; - protected CorrelationParamsContext correlationParams; - - /** - * Parameterized 
ctor for VectorFieldMapper - * @param simpleName name of field - * @param mappedFieldType field type of field - * @param multiFields multi fields - * @param copyTo copy to - * @param ignoreMalformed ignore malformed - * @param stored stored field - * @param hasDocValues has doc values - */ - public VectorFieldMapper( - String simpleName, - CorrelationVectorFieldType mappedFieldType, - FieldMapper.MultiFields multiFields, - FieldMapper.CopyTo copyTo, - Explicit ignoreMalformed, - boolean stored, - boolean hasDocValues - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); - this.ignoreMalformed = ignoreMalformed; - this.stored = stored; - this.hasDocValues = hasDocValues; - this.dimension = mappedFieldType.getDimension(); - } - - @Override - protected VectorFieldMapper clone() { - return (VectorFieldMapper) super.clone(); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void parseCreateField(ParseContext parseContext) throws IOException { - parseCreateField(parseContext, fieldType().getDimension()); - } - - protected abstract void parseCreateField(ParseContext parseContext, int dimension) throws IOException; - - Optional getFloatsFromContext(ParseContext context, int dimension) throws IOException { - context.path().add(simpleName()); - - List vector = new ArrayList<>(); - XContentParser.Token token = context.parser().currentToken(); - float value; - if (token == XContentParser.Token.START_ARRAY) { - token = context.parser().nextToken(); - while (token != XContentParser.Token.END_ARRAY) { - value = context.parser().floatValue(); - - if (Float.isNaN(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be NaN"); - } - - if (Float.isInfinite(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be infinity"); - } - vector.add(value); - token = context.parser().nextToken(); - } - } else if (token == XContentParser.Token.VALUE_NUMBER) { - value = 
context.parser().floatValue(); - if (Float.isNaN(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be NaN"); - } - - if (Float.isInfinite(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be infinity"); - } - vector.add(value); - context.parser().nextToken(); - } else if (token == XContentParser.Token.VALUE_NULL) { - context.path().remove(); - return Optional.empty(); - } - - if (dimension != vector.size()) { - String errorMessage = String.format( - Locale.ROOT, - "Vector dimension mismatch. Expected: %d, Given: %d", - dimension, - vector.size() - ); - throw new IllegalArgumentException(errorMessage); - } - - float[] array = new float[vector.size()]; - int i = 0; - for (Float f : vector) { - array[i++] = f; - } - return Optional.of(array); - } - - @Override - protected boolean docValuesByDefault() { - return true; - } - - @Override - public ParametrizedFieldMapper.Builder getMergeBuilder() { - return new VectorFieldMapper.Builder(simpleName()).init(this); - } - - @Override - public boolean parsesArrayValue() { - return true; - } - - @Override - public CorrelationVectorFieldType fieldType() { - return (CorrelationVectorFieldType) super.fieldType(); - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || ignoreMalformed.explicit()) { - builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); - } - } - - /** - * Class for constants used in parent class VectorFieldMapper - */ - public static class Names { - public static final String IGNORE_MALFORMED = "ignore_malformed"; - } - - /** - * Class for constants used in parent class VectorFieldMapper - */ - public static class Defaults { - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - } -} diff --git 
a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java deleted file mode 100644 index 4fdc622c3d886..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * correlation field mapper package - */ -package org.opensearch.plugin.correlation.core.index.mapper; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java deleted file mode 100644 index cfc0ffdfa81f1..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/** - * package to wrap Lucene KnnFloatVectorField and KnnFloatVectorQuery for Opensearch events-correlation-engine - */ -package org.opensearch.plugin.correlation.core.index; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java deleted file mode 100644 index e95b68e855cca..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.query; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.Query; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.ParsingException; -import org.opensearch.core.common.Strings; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.NumberFieldMapper; -import org.opensearch.index.query.AbstractQueryBuilder; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.WithFieldName; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -/** - * Constructs a 
query to get correlated events or documents for a particular event or document. - * - * @opensearch.internal - */ -public class CorrelationQueryBuilder extends AbstractQueryBuilder implements WithFieldName { - - private static final Logger log = LogManager.getLogger(CorrelationQueryBuilder.class); - protected static final ParseField VECTOR_FIELD = new ParseField("vector"); - protected static final ParseField K_FIELD = new ParseField("k"); - protected static final ParseField FILTER_FIELD = new ParseField("filter"); - /** - * max number of neighbors that can be retrieved. - */ - public static int K_MAX = 10000; - - /** - * name of the query - */ - public static final ParseField NAME_FIELD = new ParseField("correlation"); - - private String fieldName; - private float[] vector; - private int k = 0; - private double boost; - private QueryBuilder filter; - - private CorrelationQueryBuilder() {} - - /** - * parameterized ctor for CorrelationQueryBuilder - * @param fieldName field name for query - * @param vector query vector - * @param k number of nearby neighbors - */ - public CorrelationQueryBuilder(String fieldName, float[] vector, int k) { - this(fieldName, vector, k, null); - } - - /** - * parameterized ctor for CorrelationQueryBuilder - * @param fieldName field name for query - * @param vector query vector - * @param k number of nearby neighbors - * @param filter optional filter query - */ - public CorrelationQueryBuilder(String fieldName, float[] vector, int k, QueryBuilder filter) { - if (Strings.isNullOrEmpty(fieldName)) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] requires fieldName", NAME_FIELD.getPreferredName()) - ); - } - if (vector == null) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] requires query vector", NAME_FIELD.getPreferredName()) - ); - } - if (vector.length == 0) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] query vector is empty", 
NAME_FIELD.getPreferredName()) - ); - } - if (k <= 0) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires k > 0", NAME_FIELD.getPreferredName())); - } - if (k > K_MAX) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires k <= ", K_MAX)); - } - - this.fieldName = fieldName; - this.vector = vector; - this.k = k; - this.filter = filter; - } - - /** - * parameterized ctor for CorrelationQueryBuilder - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationQueryBuilder(StreamInput sin) throws IOException { - super(sin); - this.fieldName = sin.readString(); - this.vector = sin.readFloatArray(); - this.k = sin.readInt(); - this.filter = sin.readOptionalNamedWriteable(QueryBuilder.class); - } - - private static float[] objectsToFloats(List objs) { - float[] vector = new float[objs.size()]; - for (int i = 0; i < objs.size(); ++i) { - vector[i] = ((Number) objs.get(i)).floatValue(); - } - return vector; - } - - /** - * parse into CorrelationQueryBuilder - * @param xcp XContentParser - * @return CorrelationQueryBuilder - */ - public static CorrelationQueryBuilder parse(XContentParser xcp) throws IOException { - String fieldName = null; - List vector = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - - int k = 0; - QueryBuilder filter = null; - String queryName = null; - String currentFieldName = null; - XContentParser.Token token; - while ((token = xcp.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = xcp.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - throwParsingExceptionOnMultipleFields(NAME_FIELD.getPreferredName(), xcp.getTokenLocation(), fieldName, currentFieldName); - fieldName = currentFieldName; - while ((token = xcp.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = xcp.currentName(); 
- } else if (token.isValue() || token == XContentParser.Token.START_ARRAY) { - if (VECTOR_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - vector = xcp.list(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - boost = xcp.floatValue(); - } else if (K_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - k = (Integer) NumberFieldMapper.NumberType.INTEGER.parse(xcp.objectBytes(), false); - } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - queryName = xcp.text(); - } else { - throw new ParsingException( - xcp.getTokenLocation(), - "[" + NAME_FIELD.getPreferredName() + "] query does not support [" + currentFieldName + "]" - ); - } - } else if (token == XContentParser.Token.START_OBJECT) { - String tokenName = xcp.currentName(); - if (FILTER_FIELD.getPreferredName().equals(tokenName)) { - filter = parseInnerQueryBuilder(xcp); - } else { - throw new ParsingException( - xcp.getTokenLocation(), - "[" + NAME_FIELD.getPreferredName() + "] unknown token [" + token + "]" - ); - } - } else { - throw new ParsingException( - xcp.getTokenLocation(), - "[" + NAME_FIELD.getPreferredName() + "] unknown token [" + token + "] after [" + currentFieldName + "]" - ); - } - } - } else { - throwParsingExceptionOnMultipleFields(NAME_FIELD.getPreferredName(), xcp.getTokenLocation(), fieldName, xcp.currentName()); - fieldName = xcp.currentName(); - vector = xcp.list(); - } - } - - assert vector != null; - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(fieldName, objectsToFloats(vector), k, filter); - correlationQueryBuilder.queryName(queryName); - correlationQueryBuilder.boost(boost); - return correlationQueryBuilder; - } - - public void setFieldName(String fieldName) { - this.fieldName = fieldName; - } - - /** - * get field name - * @return field name - */ - @Override - public String fieldName() { - return fieldName; - } - - public 
void setVector(float[] vector) { - this.vector = vector; - } - - /** - * get query vector - * @return query vector - */ - public Object vector() { - return vector; - } - - public void setK(int k) { - this.k = k; - } - - /** - * get number of nearby neighbors - * @return number of nearby neighbors - */ - public int getK() { - return k; - } - - public void setBoost(double boost) { - this.boost = boost; - } - - /** - * get boost - * @return boost - */ - public double getBoost() { - return boost; - } - - public void setFilter(QueryBuilder filter) { - this.filter = filter; - } - - /** - * get optional filter - * @return optional filter - */ - public QueryBuilder getFilter() { - return filter; - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - out.writeFloatArray(vector); - out.writeInt(k); - out.writeOptionalNamedWriteable(filter); - } - - @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(fieldName); - - builder.field(VECTOR_FIELD.getPreferredName(), vector); - builder.field(K_FIELD.getPreferredName(), k); - if (filter != null) { - builder.field(FILTER_FIELD.getPreferredName(), filter); - } - printBoostAndQueryName(builder); - builder.endObject(); - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType mappedFieldType = context.fieldMapper(fieldName); - - if (!(mappedFieldType instanceof VectorFieldMapper.CorrelationVectorFieldType)) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "Field '%s' is not knn_vector type.", this.fieldName)); - } - - VectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = - (VectorFieldMapper.CorrelationVectorFieldType) mappedFieldType; - int fieldDimension = correlationVectorFieldType.getDimension(); - - if (fieldDimension != vector.length) { - throw new IllegalArgumentException( - String.format( - 
Locale.getDefault(), - "Query vector has invalid dimension: %d. Dimension should be: %d", - vector.length, - fieldDimension - ) - ); - } - - String indexName = context.index().getName(); - CorrelationQueryFactory.CreateQueryRequest createQueryRequest = new CorrelationQueryFactory.CreateQueryRequest( - indexName, - this.fieldName, - this.vector, - this.k, - this.filter, - context - ); - return CorrelationQueryFactory.create(createQueryRequest); - } - - @Override - protected boolean doEquals(CorrelationQueryBuilder other) { - return Objects.equals(fieldName, other.fieldName) && Arrays.equals(vector, other.vector) && Objects.equals(k, other.k); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, vector, k); - } - - @Override - public String getWriteableName() { - return NAME_FIELD.getPreferredName(); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java deleted file mode 100644 index d5db299bfa3a5..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.query; - -import org.apache.lucene.search.KnnFloatVectorQuery; -import org.apache.lucene.search.Query; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Optional; - -/** - * CorrelationQueryFactory util class is used to construct a Lucene KnnFloatVectorQuery. 
- * - * @opensearch.internal - */ -public class CorrelationQueryFactory { - - /** - * static method which takes input params to construct a Lucene KnnFloatVectorQuery. - * @param createQueryRequest object parameter containing inputs for constructing Lucene KnnFloatVectorQuery. - * @return generic Lucene Query object - */ - public static Query create(CreateQueryRequest createQueryRequest) { - final String indexName = createQueryRequest.getIndexName(); - final String fieldName = createQueryRequest.getFieldName(); - final int k = createQueryRequest.getK(); - final float[] vector = createQueryRequest.getVector(); - - if (createQueryRequest.getFilter().isPresent()) { - final QueryShardContext context = createQueryRequest.getContext() - .orElseThrow(() -> new RuntimeException("Shard context cannot be null")); - - try { - final Query filterQuery = createQueryRequest.getFilter().get().toQuery(context); - return new KnnFloatVectorQuery(fieldName, vector, k, filterQuery); - } catch (IOException ex) { - throw new RuntimeException("Cannot create knn query with filter", ex); - } - } - return new KnnFloatVectorQuery(fieldName, vector, k); - } - - /** - * class containing params to construct a Lucene KnnFloatVectorQuery. 
- * - * @opensearch.internal - */ - public static class CreateQueryRequest { - private String indexName; - - private String fieldName; - - private float[] vector; - - private int k; - - private QueryBuilder filter; - - private QueryShardContext context; - - /** - * Parameterized ctor for CreateQueryRequest - * @param indexName index name - * @param fieldName field name - * @param vector query vector - * @param k number of nearby neighbors - * @param filter additional filter query - * @param context QueryShardContext - */ - public CreateQueryRequest( - String indexName, - String fieldName, - float[] vector, - int k, - QueryBuilder filter, - QueryShardContext context - ) { - this.indexName = indexName; - this.fieldName = fieldName; - this.vector = vector; - this.k = k; - this.filter = filter; - this.context = context; - } - - /** - * get index name - * @return get index name - */ - public String getIndexName() { - return indexName; - } - - /** - * get field name - * @return get field name - */ - public String getFieldName() { - return fieldName; - } - - /** - * get vector - * @return get vector - */ - public float[] getVector() { - return vector; - } - - /** - * get number of nearby neighbors - * @return number of nearby neighbors - */ - public int getK() { - return k; - } - - /** - * get optional filter query - * @return get optional filter query - */ - public Optional getFilter() { - return Optional.ofNullable(filter); - } - - /** - * get optional query shard context - * @return get optional query shard context - */ - public Optional getContext() { - return Optional.ofNullable(context); - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java deleted file mode 100644 index 2cf5db786a60f..0000000000000 --- 
a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * correlation query builder package - */ -package org.opensearch.plugin.correlation.core.index.query; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java deleted file mode 100644 index 82be787af5a72..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * base package of events-correlation-engine - */ -package org.opensearch.plugin.correlation; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java deleted file mode 100644 index ab6f05ec0e6a3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.rules.action; - -import org.opensearch.action.ActionType; - -/** - * Transport Action for indexing correlation rules. - * - * @opensearch.internal - */ -public class IndexCorrelationRuleAction extends ActionType { - - /** - * Instance of IndexCorrelationRuleAction - */ - public static final IndexCorrelationRuleAction INSTANCE = new IndexCorrelationRuleAction(); - /** - * Name of IndexCorrelationRuleAction - */ - public static final String NAME = "cluster:admin/correlation/rules"; - - private IndexCorrelationRuleAction() { - super(NAME, IndexCorrelationRuleResponse::new); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java deleted file mode 100644 index 3fe25d144059d..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.action; - -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.rest.RestRequest; - -import java.io.IOException; - -/** - * A request to index correlation rules. 
- * - * @opensearch.internal - */ -public class IndexCorrelationRuleRequest extends ActionRequest { - - private String correlationRuleId; - - private CorrelationRule correlationRule; - - private RestRequest.Method method; - - /** - * Parameterized ctor for IndexCorrelationRuleRequest - * @param correlationRule correlation rule - * @param method Rest method of request PUT or POST - */ - public IndexCorrelationRuleRequest(CorrelationRule correlationRule, RestRequest.Method method) { - super(); - this.correlationRuleId = ""; - this.correlationRule = correlationRule; - this.method = method; - } - - /** - * Parameterized ctor for IndexCorrelationRuleRequest - * @param correlationRuleId correlation rule id - * @param correlationRule correlation rule - * @param method Rest method of request PUT or POST - */ - public IndexCorrelationRuleRequest(String correlationRuleId, CorrelationRule correlationRule, RestRequest.Method method) { - super(); - this.correlationRuleId = correlationRuleId; - this.correlationRule = correlationRule; - this.method = method; - } - - /** - * StreamInput ctor of IndexCorrelationRuleRequest - * @param sin StreamInput - * @throws IOException IOException - */ - public IndexCorrelationRuleRequest(StreamInput sin) throws IOException { - this(sin.readString(), CorrelationRule.readFrom(sin), sin.readEnum(RestRequest.Method.class)); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(correlationRuleId); - correlationRule.writeTo(out); - } - - /** - * get correlation rule id - * @return correlation rule id - */ - public String getCorrelationRuleId() { - return correlationRuleId; - } - - /** - * get correlation rule - * @return correlation rule - */ - public CorrelationRule getCorrelationRule() { - return correlationRule; - } - - /** - * get Rest method - * @return Rest method - */ - public RestRequest.Method getMethod() { - 
return method; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java deleted file mode 100644 index 8102e6585825e..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.action; - -import org.opensearch.core.ParseField; -import org.opensearch.core.action.ActionResponse; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; - -import java.io.IOException; - -/** - * Transport Response for indexing correlation rules. 
- * - * @opensearch.internal - */ -public class IndexCorrelationRuleResponse extends ActionResponse implements ToXContentObject { - - private static final ParseField _ID = new ParseField("_id"); - private static final ParseField _VERSION = new ParseField("_version"); - - private String id; - - private Long version; - - private RestStatus status; - - private CorrelationRule correlationRule; - - /** - * Parameterized ctor for IndexCorrelationRuleResponse - * @param version version of rule - * @param status Rest status of indexing rule - * @param correlationRule correlation rule - */ - public IndexCorrelationRuleResponse(String id, Long version, RestStatus status, CorrelationRule correlationRule) { - super(); - this.id = id; - this.version = version; - this.status = status; - this.correlationRule = correlationRule; - } - - /** - * StreamInput ctor of IndexCorrelationRuleResponse - * @param sin StreamInput - * @throws IOException IOException - */ - public IndexCorrelationRuleResponse(StreamInput sin) throws IOException { - this(sin.readString(), sin.readLong(), sin.readEnum(RestStatus.class), CorrelationRule.readFrom(sin)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject().field(_ID.getPreferredName(), id).field(_VERSION.getPreferredName(), version); - - builder.field("rule", correlationRule); - return builder.endObject(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeLong(version); - out.writeEnum(status); - correlationRule.writeTo(out); - } - - /** - * get id - * @return id of rule - */ - public String getId() { - return id; - } - - /** - * get status - * @return Rest status of indexing rule - */ - public RestStatus getStatus() { - return status; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java 
b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java deleted file mode 100644 index c01f2936a20ca..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * Transport Actions, Requests and Responses for correlation rules - */ -package org.opensearch.plugin.correlation.rules.action; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java deleted file mode 100644 index 3797e0c7043dc..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.rules.model; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -/** - * Correlation Query DSL - * { - * "index": "s3_access_logs", - * "query": "aws.cloudtrail.eventName:ReplicateObject", - * "timestampField": "@timestamp", - * "tags": [ - * "s3" - * ] - * } - */ -public class CorrelationQuery implements Writeable, ToXContentObject { - - private static final Logger log = LogManager.getLogger(CorrelationQuery.class); - private static final ParseField INDEX_FIELD = new ParseField("index"); - private static final ParseField QUERY_FIELD = new ParseField("query"); - private static final ParseField TIMESTAMP_FIELD = new ParseField("timestampField"); - private static final ParseField TAGS_FIELD = new ParseField("tags"); - private static final ObjectParser PARSER = new ObjectParser<>("CorrelationQuery", CorrelationQuery::new); - - static { - PARSER.declareString(CorrelationQuery::setIndex, INDEX_FIELD); - PARSER.declareString(CorrelationQuery::setQuery, QUERY_FIELD); - PARSER.declareStringOrNull(CorrelationQuery::setTimestampField, TIMESTAMP_FIELD); - PARSER.declareField((xcp, query, context) -> { - List tags = new ArrayList<>(); - XContentParser.Token currentToken = xcp.currentToken(); - if (currentToken == XContentParser.Token.START_ARRAY) { - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - tags.add(xcp.text()); - } - } - query.setTags(tags); - }, TAGS_FIELD, 
ObjectParser.ValueType.STRING_ARRAY); - } - - private String index; - - private String query; - - private String timestampField; - - private List tags; - - private CorrelationQuery() { - this.timestampField = "_timestamp"; - } - - /** - * Parameterized ctor of Correlation Query - * @param index event index to correlate - * @param query query to filter relevant events for correlations from index - * @param timestampField timestamp field in the index - * @param tags tags to store additional metadata as part of correlation queries. - */ - public CorrelationQuery(String index, String query, String timestampField, List tags) { - this.index = index; - this.query = query; - this.timestampField = timestampField != null ? timestampField : "_timestamp"; - this.tags = tags; - } - - /** - * StreamInput ctor of Correlation Query - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationQuery(StreamInput sin) throws IOException { - this(sin.readString(), sin.readString(), sin.readString(), sin.readStringList()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); - out.writeString(query); - out.writeString(timestampField); - out.writeStringCollection(tags); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX_FIELD.getPreferredName(), index) - .field(QUERY_FIELD.getPreferredName(), query) - .field(TIMESTAMP_FIELD.getPreferredName(), timestampField) - .field(TAGS_FIELD.getPreferredName(), tags); - return builder.endObject(); - } - - /** - * parse into CorrelationQuery - * @param xcp XContentParser - * @return CorrelationQuery - */ - public static CorrelationQuery parse(XContentParser xcp) { - return PARSER.apply(xcp, null); - } - - /** - * convert StreamInput to CorrelationQuery - * @param sin StreamInput - * @return CorrelationQuery - * @throws IOException IOException - */ - public static 
CorrelationQuery readFrom(StreamInput sin) throws IOException { - return new CorrelationQuery(sin); - } - - /** - * Set index - * @param index event index to correlate - */ - public void setIndex(String index) { - this.index = index; - } - - /** - * Get index - * @return event index to correlate - */ - public String getIndex() { - return index; - } - - /** - * Set query - * @param query query to filter relevant events for correlations from index - */ - public void setQuery(String query) { - this.query = query; - } - - /** - * Get query - * @return query to filter relevant events for correlations from index - */ - public String getQuery() { - return query; - } - - /** - * Set timestamp field - * @param timestampField timestamp field in the index - */ - public void setTimestampField(String timestampField) { - this.timestampField = timestampField != null ? timestampField : "_timestamp"; - } - - /** - * Get timestamp field - * @return timestamp field in the index - */ - public String getTimestampField() { - return timestampField; - } - - /** - * Set tags - * @param tags tags to store additional metadata as part of correlation queries. - */ - public void setTags(List tags) { - this.tags = tags; - } - - /** - * Get tags - * @return tags to store additional metadata as part of correlation queries. 
- */ - public List getTags() { - return tags; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java deleted file mode 100644 index 6978d7248e199..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.model; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - -/** - * Correlation Rule DSL - * { - * "name": "s3 to app logs", - * "correlate": [ - * { - * "index": "s3_access_logs", - * "query": "aws.cloudtrail.eventName:ReplicateObject", - * "timestampField": "@timestamp", - * "tags": [ - * "s3" - * ] - * } - * ] - * } - * - * @opensearch.api - * @opensearch.experimental - */ -public class CorrelationRule implements Writeable, ToXContentObject { - - private static final Logger log = LogManager.getLogger(CorrelationRule.class); - - /** - * Correlation Rule Index - */ - public static final String CORRELATION_RULE_INDEX = 
".opensearch-correlation-rules-config"; - - private static final ParseField ID_FIELD = new ParseField("id"); - private static final ParseField VERSION_FIELD = new ParseField("version"); - private static final ParseField NAME_FIELD = new ParseField("name"); - private static final ParseField CORRELATION_QUERIES_FIELD = new ParseField("correlate"); - private static final ObjectParser PARSER = new ObjectParser<>("CorrelationRule", CorrelationRule::new); - - static { - PARSER.declareString(CorrelationRule::setId, ID_FIELD); - PARSER.declareLong(CorrelationRule::setVersion, VERSION_FIELD); - PARSER.declareString(CorrelationRule::setName, NAME_FIELD); - PARSER.declareField((xcp, rule, context) -> { - List correlationQueries = new ArrayList<>(); - XContentParser.Token currentToken = xcp.currentToken(); - if (currentToken == XContentParser.Token.START_ARRAY) { - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - correlationQueries.add(CorrelationQuery.parse(xcp)); - } - } - rule.setCorrelationQueries(correlationQueries); - }, CORRELATION_QUERIES_FIELD, ObjectParser.ValueType.OBJECT_ARRAY); - } - - private String id; - - private Long version; - - private String name; - - private List correlationQueries; - - private CorrelationRule() {} - - /** - * Parameterized ctor of Correlation Rule - * @param name name of rule - * @param correlationQueries list of correlation queries part of rule - */ - public CorrelationRule(String name, List correlationQueries) { - this("", 1L, name, correlationQueries); - } - - /** - * Parameterized ctor of Correlation Rule - * @param id id of rule - * @param version version of rule - * @param name name of rule - * @param correlationQueries list of correlation queries part of rule - */ - public CorrelationRule(String id, Long version, String name, List correlationQueries) { - this.id = id; - this.version = version; - this.name = name; - this.correlationQueries = correlationQueries; - } - - /** - * StreamInput ctor of Correlation Rule - * 
@param sin StreamInput - * @throws IOException IOException - */ - public CorrelationRule(StreamInput sin) throws IOException { - this(sin.readString(), sin.readLong(), sin.readString(), sin.readList(CorrelationQuery::readFrom)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(ID_FIELD.getPreferredName(), id); - builder.field(VERSION_FIELD.getPreferredName(), version); - builder.field(NAME_FIELD.getPreferredName(), name); - - CorrelationQuery[] correlationQueries = new CorrelationQuery[] {}; - correlationQueries = this.correlationQueries.toArray(correlationQueries); - builder.field(CORRELATION_QUERIES_FIELD.getPreferredName(), correlationQueries); - return builder.endObject(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeLong(version); - out.writeString(name); - - for (CorrelationQuery query : correlationQueries) { - query.writeTo(out); - } - } - - /** - * parse into CorrelationRule - * @param xcp XContentParser - * @param id id of rule - * @param version version of rule - * @return CorrelationRule - */ - public static CorrelationRule parse(XContentParser xcp, String id, Long version) { - return PARSER.apply(xcp, null); - } - - /** - * convert StreamInput to CorrelationRule - * @param sin StreamInput - * @return CorrelationRule - * @throws IOException IOException - */ - public static CorrelationRule readFrom(StreamInput sin) throws IOException { - return new CorrelationRule(sin); - } - - /** - * set id - * @param id id of rule - */ - public void setId(String id) { - this.id = id; - } - - /** - * get id - * @return id of rule - */ - public String getId() { - return id; - } - - /** - * set version - * @param version version of rule - */ - public void setVersion(Long version) { - this.version = version; - } - - /** - * get version - * @return version of rule - */ - public Long getVersion() { - 
return version; - } - - /** - * set name - * @param name name of rule - */ - public void setName(String name) { - this.name = name; - } - - /** - * get name - * @return name of rule - */ - public String getName() { - return name; - } - - /** - * set correlation queries - * @param correlationQueries set correlation queries for the rule - */ - public void setCorrelationQueries(List correlationQueries) { - this.correlationQueries = correlationQueries; - } - - /** - * get correlation queries - * @return correlation queries for the rule - */ - public List getCorrelationQueries() { - return correlationQueries; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CorrelationRule that = (CorrelationRule) o; - return id.equals(that.id) - && version.equals(that.version) - && name.equals(that.name) - && correlationQueries.equals(that.correlationQueries); - } - - @Override - public int hashCode() { - return Objects.hash(id, version, name, correlationQueries); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java deleted file mode 100644 index b04b7be3c62e3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/** - * data models for correlation rules - */ -package org.opensearch.plugin.correlation.rules.model; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java deleted file mode 100644 index 3b2b7eb02ae5f..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.resthandler; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.client.node.NodeClient; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.plugin.correlation.EventsCorrelationPlugin; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleRequest; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleResponse; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.BytesRestResponse; -import org.opensearch.rest.RestChannel; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestResponse; -import org.opensearch.rest.action.RestResponseListener; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; - -/** - * Rest action for indexing correlation rules. 
- * - * @opensearch.api - */ -public class RestIndexCorrelationRuleAction extends BaseRestHandler { - - private static final Logger log = LogManager.getLogger(RestIndexCorrelationRuleAction.class); - - /** - * Default constructor - */ - public RestIndexCorrelationRuleAction() {} - - @Override - public String getName() { - return "index_correlation_rule_action"; - } - - @Override - public List routes() { - return List.of( - new Route(RestRequest.Method.POST, EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI), - new Route( - RestRequest.Method.PUT, - String.format(Locale.ROOT, "%s/{%s}", EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI, "rule_id") - ) - ); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - log.debug(String.format(Locale.ROOT, "%s %s", request.method(), EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI)); - - String id = request.param("rule_id", ""); - - XContentParser xcp = request.contentParser(); - - CorrelationRule correlationRule = CorrelationRule.parse(xcp, id, 1L); - IndexCorrelationRuleRequest indexCorrelationRuleRequest = new IndexCorrelationRuleRequest(id, correlationRule, request.method()); - return channel -> client.execute( - IndexCorrelationRuleAction.INSTANCE, - indexCorrelationRuleRequest, - indexCorrelationRuleResponse(channel, request.method()) - ); - } - - private RestResponseListener indexCorrelationRuleResponse( - RestChannel channel, - RestRequest.Method restMethod - ) { - return new RestResponseListener<>(channel) { - @Override - public RestResponse buildResponse(IndexCorrelationRuleResponse response) throws Exception { - RestStatus returnStatus = RestStatus.CREATED; - if (restMethod == RestRequest.Method.PUT) { - returnStatus = RestStatus.OK; - } - - BytesRestResponse restResponse = new BytesRestResponse( - returnStatus, - response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS) - ); - - if (restMethod == RestRequest.Method.POST) { - 
String location = String.format( - Locale.ROOT, - "%s/%s", - EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI, - response.getId() - ); - restResponse.addHeader("Location", location); - } - - return restResponse; - } - }; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java deleted file mode 100644 index 607ec355801ad..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * Rest Handlers for correlation rules - */ -package org.opensearch.plugin.correlation.rules.resthandler; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java deleted file mode 100644 index 7b4fb670c4aee..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java +++ /dev/null @@ -1,234 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.rules.transport; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.OpenSearchStatusException; -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.action.support.WriteRequest; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.inject.Inject; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleRequest; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleResponse; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.plugin.correlation.utils.CorrelationRuleIndices; -import org.opensearch.plugin.correlation.utils.IndexUtils; -import org.opensearch.rest.RestRequest; -import org.opensearch.tasks.Task; -import org.opensearch.transport.TransportService; - -import java.io.IOException; -import java.util.Locale; - -/** - * Transport Action for indexing correlation rules. 
- * - * @opensearch.internal - */ -public class TransportIndexCorrelationRuleAction extends HandledTransportAction { - - private static final Logger log = LogManager.getLogger(TransportIndexCorrelationRuleAction.class); - - private final Client client; - - private final CorrelationRuleIndices correlationRuleIndices; - - private final ClusterService clusterService; - - /** - * Parameterized ctor for Transport Action - * @param transportService TransportService - * @param client OS client - * @param actionFilters ActionFilters - * @param clusterService ClusterService - * @param correlationRuleIndices CorrelationRuleIndices which manages lifecycle of correlation rule index - */ - @Inject - public TransportIndexCorrelationRuleAction( - TransportService transportService, - Client client, - ActionFilters actionFilters, - ClusterService clusterService, - CorrelationRuleIndices correlationRuleIndices - ) { - super(IndexCorrelationRuleAction.NAME, transportService, actionFilters, IndexCorrelationRuleRequest::new); - this.client = client; - this.clusterService = clusterService; - this.correlationRuleIndices = correlationRuleIndices; - } - - @Override - protected void doExecute(Task task, IndexCorrelationRuleRequest request, ActionListener listener) { - AsyncIndexCorrelationRuleAction asyncAction = new AsyncIndexCorrelationRuleAction(request, listener); - asyncAction.start(); - } - - private class AsyncIndexCorrelationRuleAction { - private final IndexCorrelationRuleRequest request; - - private final ActionListener listener; - - AsyncIndexCorrelationRuleAction(IndexCorrelationRuleRequest request, ActionListener listener) { - this.request = request; - this.listener = listener; - } - - void start() { - try { - if (correlationRuleIndices.correlationRuleIndexExists() == false) { - try { - correlationRuleIndices.initCorrelationRuleIndex(new ActionListener<>() { - @Override - public void onResponse(CreateIndexResponse response) { - try { - onCreateMappingsResponse(response); - 
indexCorrelationRule(); - } catch (IOException e) { - onFailures(e); - } - } - - @Override - public void onFailure(Exception e) { - onFailures(e); - } - }); - } catch (IOException e) { - onFailures(e); - } - } else if (!IndexUtils.correlationRuleIndexUpdated) { - IndexUtils.updateIndexMapping( - CorrelationRule.CORRELATION_RULE_INDEX, - CorrelationRuleIndices.correlationRuleIndexMappings(), - clusterService.state(), - client.admin().indices(), - new ActionListener<>() { - @Override - public void onResponse(AcknowledgedResponse response) { - onUpdateMappingsResponse(response); - try { - indexCorrelationRule(); - } catch (IOException e) { - onFailures(e); - } - } - - @Override - public void onFailure(Exception e) { - onFailures(e); - } - } - ); - } else { - indexCorrelationRule(); - } - } catch (IOException ex) { - onFailures(ex); - } - } - - void indexCorrelationRule() throws IOException { - IndexRequest indexRequest; - if (request.getMethod() == RestRequest.Method.POST) { - indexRequest = new IndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .source(request.getCorrelationRule().toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .timeout(TimeValue.timeValueSeconds(60)); - } else { - indexRequest = new IndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .source(request.getCorrelationRule().toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .id(request.getCorrelationRuleId()) - .timeout(TimeValue.timeValueSeconds(60)); - } - - client.index(indexRequest, new ActionListener<>() { - @Override - public void onResponse(IndexResponse response) { - if (response.status().equals(RestStatus.CREATED) || response.status().equals(RestStatus.OK)) { - CorrelationRule ruleResponse = request.getCorrelationRule(); - ruleResponse.setId(response.getId()); - onOperation(ruleResponse); - } else { - onFailures(new 
OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); - } - } - - @Override - public void onFailure(Exception e) { - onFailures(e); - } - }); - } - - private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { - if (response.isAcknowledged()) { - log.info(String.format(Locale.ROOT, "Created %s with mappings.", CorrelationRule.CORRELATION_RULE_INDEX)); - IndexUtils.correlationRuleIndexUpdated(); - } else { - log.error(String.format(Locale.ROOT, "Create %s mappings call not acknowledged.", CorrelationRule.CORRELATION_RULE_INDEX)); - throw new OpenSearchStatusException( - String.format(Locale.getDefault(), "Create %s mappings call not acknowledged", CorrelationRule.CORRELATION_RULE_INDEX), - RestStatus.INTERNAL_SERVER_ERROR - ); - } - } - - private void onUpdateMappingsResponse(AcknowledgedResponse response) { - if (response.isAcknowledged()) { - log.info(String.format(Locale.ROOT, "Created %s with mappings.", CorrelationRule.CORRELATION_RULE_INDEX)); - IndexUtils.correlationRuleIndexUpdated(); - } else { - log.error(String.format(Locale.ROOT, "Create %s mappings call not acknowledged.", CorrelationRule.CORRELATION_RULE_INDEX)); - throw new OpenSearchStatusException( - String.format(Locale.getDefault(), "Create %s mappings call not acknowledged", CorrelationRule.CORRELATION_RULE_INDEX), - RestStatus.INTERNAL_SERVER_ERROR - ); - } - } - - private void onOperation(CorrelationRule correlationRule) { - finishHim(correlationRule, null); - } - - private void onFailures(Exception t) { - finishHim(null, t); - } - - private void finishHim(CorrelationRule correlationRule, Exception t) { - if (t != null) { - listener.onFailure(t); - } else { - listener.onResponse( - new IndexCorrelationRuleResponse( - correlationRule.getId(), - correlationRule.getVersion(), - request.getMethod() == RestRequest.Method.POST ? 
RestStatus.CREATED : RestStatus.OK, - correlationRule - ) - ); - } - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java deleted file mode 100644 index 7a47efbb9bb45..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * Transport Actions for correlation rules. - */ -package org.opensearch.plugin.correlation.rules.transport; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java deleted file mode 100644 index 2e2dbbffbeaa2..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.settings; - -import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.TimeValue; - -import java.util.concurrent.TimeUnit; - -import static org.opensearch.common.settings.Setting.Property.IndexScope; - -/** - * Settings for events-correlation-engine. 
- * - * @opensearch.api - * @opensearch.experimental - */ -public class EventsCorrelationSettings { - /** - * Correlation Index setting name - */ - public static final String CORRELATION_INDEX = "index.correlation"; - /** - * Boolean setting to check if an OS index is a correlation index. - */ - public static final Setting IS_CORRELATION_INDEX_SETTING = Setting.boolSetting(CORRELATION_INDEX, false, IndexScope); - /** - * Global time window setting for Correlations - */ - public static final Setting CORRELATION_TIME_WINDOW = Setting.positiveTimeSetting( - "plugins.security_analytics.correlation_time_window", - new TimeValue(5, TimeUnit.MINUTES), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Default constructor - */ - public EventsCorrelationSettings() {} -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java deleted file mode 100644 index 795291cd0de2e..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/** - * Settings for events-correlation-engine - */ -package org.opensearch.plugin.correlation.settings; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java deleted file mode 100644 index 3656bd413733a..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.utils; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.Settings; -import org.opensearch.core.action.ActionListener; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; - -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.Objects; - -/** - * Correlation Rule Index manager - * - * @opensearch.internal - */ -public class CorrelationRuleIndices { - private static final Logger log = LogManager.getLogger(CorrelationRuleIndices.class); - - private final Client client; - - private final ClusterService clusterService; - - /** - * Parameterized ctor for CorrelationRuleIndices - * @param client OS Client - * @param clusterService ClusterService - */ - public CorrelationRuleIndices(Client client, ClusterService clusterService) { - this.client = client; - 
this.clusterService = clusterService; - } - - /** - * get correlation rule index mappings - * @return mappings of correlation rule index - * @throws IOException IOException - */ - public static String correlationRuleIndexMappings() throws IOException { - return new String( - Objects.requireNonNull(CorrelationRuleIndices.class.getClassLoader().getResourceAsStream("mappings/correlation-rules.json")) - .readAllBytes(), - Charset.defaultCharset() - ); - } - - /** - * init the correlation rule index - * @param actionListener listener - * @throws IOException IOException - */ - public void initCorrelationRuleIndex(ActionListener actionListener) throws IOException { - if (correlationRuleIndexExists() == false) { - CreateIndexRequest indexRequest = new CreateIndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).mapping( - correlationRuleIndexMappings() - ).settings(Settings.builder().put("index.hidden", true).build()); - client.admin().indices().create(indexRequest, actionListener); - } - } - - /** - * check if correlation rule index exists - * @return boolean - */ - public boolean correlationRuleIndexExists() { - ClusterState clusterState = clusterService.state(); - return clusterState.getRoutingTable().hasIndex(CorrelationRule.CORRELATION_RULE_INDEX); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java deleted file mode 100644 index 362be3d2932e3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.utils; - -import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.client.IndicesAdminClient; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.xcontent.MediaTypeRegistry; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; - -import static org.opensearch.core.ParseField.CommonFields._META; - -/** - * Index Management utils - * - * @opensearch.internal - */ -public class IndexUtils { - private static final Integer NO_SCHEMA_VERSION = 0; - private static final String SCHEMA_VERSION = "schema_version"; - - /** - * manages the mappings lifecycle for correlation rule index - */ - public static Boolean correlationRuleIndexUpdated = false; - - private IndexUtils() {} - - /** - * updates the status of correlationRuleIndexUpdated to true - */ - public static void correlationRuleIndexUpdated() { - correlationRuleIndexUpdated = true; - } - - /** - * util method which decides based on schema version whether to update an index. 
- * @param index IndexMetadata - * @param mapping new mappings - * @return Boolean - * @throws IOException IOException - */ - public static Boolean shouldUpdateIndex(IndexMetadata index, String mapping) throws IOException { - Integer oldVersion = NO_SCHEMA_VERSION; - Integer newVersion = getSchemaVersion(mapping); - - Map indexMapping = index.mapping().sourceAsMap(); - if (indexMapping != null - && indexMapping.containsKey(_META.getPreferredName()) - && indexMapping.get(_META.getPreferredName()) instanceof HashMap) { - Map metaData = (HashMap) indexMapping.get(_META.getPreferredName()); - if (metaData.containsKey(SCHEMA_VERSION)) { - oldVersion = (Integer) metaData.get(SCHEMA_VERSION); - } - } - return newVersion > oldVersion; - } - - /** - * Gets the schema version for the mapping - * @param mapping mappings as input - * @return schema version - * @throws IOException IOException - */ - public static Integer getSchemaVersion(String mapping) throws IOException { - XContentParser xcp = MediaTypeRegistry.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, mapping); - - while (!xcp.isClosed()) { - XContentParser.Token token = xcp.currentToken(); - if (token != null && token != XContentParser.Token.END_OBJECT && token != XContentParser.Token.START_OBJECT) { - if (!Objects.equals(xcp.currentName(), _META.getPreferredName())) { - xcp.nextToken(); - xcp.skipChildren(); - } else { - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - switch (xcp.currentName()) { - case SCHEMA_VERSION: - int version = xcp.intValue(); - if (version < 0) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "%s cannot be negative", SCHEMA_VERSION) - ); - } - return version; - default: - xcp.nextToken(); - } - } - } - } - xcp.nextToken(); - } - return NO_SCHEMA_VERSION; - } - - /** - * updates the mappings for the index. 
- * @param index index for which mapping needs to be updated - * @param mapping new mappings - * @param clusterState ClusterState - * @param client Admin client - * @param actionListener listener - * @throws IOException IOException - */ - public static void updateIndexMapping( - String index, - String mapping, - ClusterState clusterState, - IndicesAdminClient client, - ActionListener actionListener - ) throws IOException { - if (clusterState.metadata().indices().containsKey(index)) { - if (shouldUpdateIndex(clusterState.metadata().index(index), mapping)) { - PutMappingRequest putMappingRequest = new PutMappingRequest(index).source(mapping, MediaTypeRegistry.JSON); - client.putMapping(putMappingRequest, actionListener); - } else { - actionListener.onResponse(new AcknowledgedResponse(true)); - } - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java deleted file mode 100644 index 798196c47df20..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/** - * utils package for events-correlation-engine - */ -package org.opensearch.plugin.correlation.utils; diff --git a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec deleted file mode 100644 index 013c17e4a9736..0000000000000 --- a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ /dev/null @@ -1 +0,0 @@ -org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec diff --git a/plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json b/plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json deleted file mode 100644 index 7741b160eca24..0000000000000 --- a/plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "_meta" : { - "schema_version": 1 - }, - "properties": { - "name": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "correlate": { - "type": "nested", - "properties": { - "index": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "query": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "tags": { - "type": "text", - "fields" : { - "keyword" : { - "type" : "keyword" - } - } - }, - "timestampField": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - } - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java 
b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java deleted file mode 100644 index 005ffa2097b03..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation; - -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -public class EventsCorrelationPluginTests extends OpenSearchTestCase { - - public void testDummy() { - Assert.assertEquals(1, 1); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java deleted file mode 100644 index 19ce3b33514d8..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.plugin.correlation.core.index.CorrelationParamsContext.PARAMETERS; -import static org.opensearch.plugin.correlation.core.index.CorrelationParamsContext.VECTOR_SIMILARITY_FUNCTION; - -/** - * Unit tests for CorrelationsParamsContext - */ -public class CorrelationParamsContextTests extends OpenSearchTestCase { - - /** - * Test reading from and writing to streams - */ - public void testStreams() throws IOException { - int efConstruction = 321; - int m = 12; - - Map parameters = new HashMap<>(); - parameters.put("m", m); - parameters.put("ef_construction", efConstruction); - - CorrelationParamsContext context = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, parameters); - - BytesStreamOutput streamOutput = new BytesStreamOutput(); - context.writeTo(streamOutput); - - CorrelationParamsContext copy = new CorrelationParamsContext(streamOutput.bytes().streamInput()); - Assert.assertEquals(context.getSimilarityFunction(), copy.getSimilarityFunction()); - Assert.assertEquals(context.getParameters(), copy.getParameters()); - } - - /** - * test get vector similarity function - */ - public void testVectorSimilarityFunction() { - int efConstruction = 321; - int m = 12; - - Map parameters = new HashMap<>(); - parameters.put("m", m); - parameters.put("ef_construction", 
efConstruction); - - CorrelationParamsContext context = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, parameters); - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN, context.getSimilarityFunction()); - } - - /** - * test get parameters - */ - public void testParameters() { - int efConstruction = 321; - int m = 12; - - Map parameters = new HashMap<>(); - parameters.put("m", m); - parameters.put("ef_construction", efConstruction); - - CorrelationParamsContext context = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, parameters); - Assert.assertEquals(parameters, context.getParameters()); - } - - /** - * test parse method with invalid input - * @throws IOException IOException - */ - public void testParse_Invalid() throws IOException { - // Invalid input type - Integer invalidIn = 12; - expectThrows(MapperParsingException.class, () -> CorrelationParamsContext.parse(invalidIn)); - - // Invalid vector similarity function - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(CorrelationParamsContext.VECTOR_SIMILARITY_FUNCTION, 0) - .endObject(); - - final Map in2 = xContentBuilderToMap(xContentBuilder); - expectThrows(MapperParsingException.class, () -> CorrelationParamsContext.parse(in2)); - - // Invalid parameters - xContentBuilder = XContentFactory.jsonBuilder().startObject().field(PARAMETERS, 0).endObject(); - - final Map in4 = xContentBuilderToMap(xContentBuilder); - expectThrows(MapperParsingException.class, () -> CorrelationParamsContext.parse(in4)); - } - - /** - * test parse with null parameters - * @throws IOException IOException - */ - public void testParse_NullParameters() throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN) - .field(PARAMETERS, (String) null) - .endObject(); - Map in = xContentBuilderToMap(xContentBuilder); - 
Assert.assertThrows(MapperParsingException.class, () -> { CorrelationParamsContext.parse(in); }); - } - - /** - * test parse method - * @throws IOException IOException - */ - public void testParse_Valid() throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN) - .startObject(PARAMETERS) - .field("m", 2) - .field("ef_construction", 128) - .endObject() - .endObject(); - - Map in = xContentBuilderToMap(xContentBuilder); - CorrelationParamsContext context = CorrelationParamsContext.parse(in); - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN, context.getSimilarityFunction()); - Assert.assertEquals(Map.of("m", 2, "ef_construction", 128), context.getParameters()); - } - - /** - * test toXContent method - * @throws IOException IOException - */ - public void testToXContent() throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN) - .startObject(PARAMETERS) - .field("m", 2) - .field("ef_construction", 128) - .endObject() - .endObject(); - - Map in = xContentBuilderToMap(xContentBuilder); - CorrelationParamsContext context = CorrelationParamsContext.parse(in); - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder = context.toXContent(builder, ToXContent.EMPTY_PARAMS); - - Map out = xContentBuilderToMap(builder); - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN.name(), out.get(VECTOR_SIMILARITY_FUNCTION)); - } - - private Map xContentBuilderToMap(XContentBuilder xContentBuilder) { - return XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java 
b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java deleted file mode 100644 index 32c71dcd37196..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.document.FieldType; -import org.opensearch.ExceptionsHelper; -import org.opensearch.OpenSearchException; -import org.opensearch.common.Randomness; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.common.io.stream.BytesStreamInput; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Random; - -/** - * Unit tests for VectorField - */ -public class VectorFieldTests extends OpenSearchTestCase { - - private final Random random = Randomness.get(); - - /** - * test VectorField ctor - */ - public void testVectorField_ctor() { - VectorField field = new VectorField("test-field", new float[] { 1.0f, 1.0f }, new FieldType()); - Assert.assertEquals("test-field", field.name()); - } - - /** - * test float vector to array serializer - * @throws IOException IOException - */ - public void testVectorAsArraySerializer() throws IOException { - final float[] vector = getArrayOfRandomFloats(20); - - final BytesStreamOutput objectStream = new BytesStreamOutput(); - objectStream.writeFloatArray(vector); - final byte[] serializedVector = objectStream.bytes().toBytesRef().bytes; - - final byte[] actualSerializedVector = VectorField.floatToByteArray(vector); - - Assert.assertNotNull(actualSerializedVector); - 
Assert.assertArrayEquals(serializedVector, actualSerializedVector); - - final float[] actualDeserializedVector = byteToFloatArray(actualSerializedVector); - Assert.assertNotNull(actualDeserializedVector); - Assert.assertArrayEquals(vector, actualDeserializedVector, 0.1f); - } - - /** - * test byte array to float vector failures - */ - public void testByteToFloatArrayFailures() { - final byte[] serializedVector = "test-dummy".getBytes(StandardCharsets.UTF_8); - expectThrows(OpenSearchException.class, () -> { byteToFloatArray(serializedVector); }); - } - - private float[] getArrayOfRandomFloats(int length) { - float[] vector = new float[length]; - for (int i = 0; i < 20; ++i) { - vector[i] = random.nextFloat(); - } - return vector; - } - - private static float[] byteToFloatArray(byte[] byteStream) { - try (BytesStreamInput objectStream = new BytesStreamInput(byteStream)) { - return objectStream.readFloatArray(); - } catch (IOException ex) { - throw ExceptionsHelper.convertToOpenSearchException(ex); - } - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java deleted file mode 100644 index 7223b450a136c..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.codec.correlation990; - -import org.apache.lucene.codecs.Codec; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.KnnFloatVectorField; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.SerialMergeScheduler; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.store.Directory; -import org.apache.lucene.tests.index.RandomIndexWriter; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; -import org.opensearch.plugin.correlation.core.index.query.CorrelationQueryFactory; -import org.opensearch.test.OpenSearchTestCase; - -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; - -import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_EF_CONSTRUCTION; -import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_M; -import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_9_0; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -/** - * Unit tests for custom correlation codec - */ -public class CorrelationCodecTests extends OpenSearchTestCase { - - private static final String FIELD_NAME_ONE = "test_vector_one"; - private static final String FIELD_NAME_TWO = "test_vector_two"; - - /** - * test correlation vector index - * @throws Exception 
Exception - */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8329") - public void testCorrelationVectorIndex() throws Exception { - Function perFieldCorrelationVectorsProvider = - mapperService -> new PerFieldCorrelationVectorsFormat(Optional.of(mapperService)); - Function correlationCodecProvider = (correlationVectorsFormat -> new CorrelationCodec( - V_9_9_0.getDefaultCodecDelegate(), - correlationVectorsFormat - )); - testCorrelationVectorIndex(correlationCodecProvider, perFieldCorrelationVectorsProvider); - } - - private void testCorrelationVectorIndex( - final Function codecProvider, - final Function perFieldCorrelationVectorsProvider - ) throws Exception { - final MapperService mapperService = mock(MapperService.class); - final CorrelationParamsContext correlationParamsContext = new CorrelationParamsContext( - VectorSimilarityFunction.EUCLIDEAN, - Map.of(METHOD_PARAMETER_M, 16, METHOD_PARAMETER_EF_CONSTRUCTION, 256) - ); - - final VectorFieldMapper.CorrelationVectorFieldType mappedFieldType1 = new VectorFieldMapper.CorrelationVectorFieldType( - FIELD_NAME_ONE, - Map.of(), - 3, - correlationParamsContext - ); - final VectorFieldMapper.CorrelationVectorFieldType mappedFieldType2 = new VectorFieldMapper.CorrelationVectorFieldType( - FIELD_NAME_TWO, - Map.of(), - 2, - correlationParamsContext - ); - when(mapperService.fieldType(eq(FIELD_NAME_ONE))).thenReturn(mappedFieldType1); - when(mapperService.fieldType(eq(FIELD_NAME_TWO))).thenReturn(mappedFieldType2); - - var perFieldCorrelationVectorsFormatSpy = spy(perFieldCorrelationVectorsProvider.apply(mapperService)); - final Codec codec = codecProvider.apply(perFieldCorrelationVectorsFormatSpy); - - Directory dir = newFSDirectory(createTempDir()); - IndexWriterConfig iwc = newIndexWriterConfig(); - iwc.setMergeScheduler(new SerialMergeScheduler()); - iwc.setCodec(codec); - - final FieldType luceneFieldType = KnnFloatVectorField.createFieldType(3, VectorSimilarityFunction.EUCLIDEAN); - 
float[] array = { 1.0f, 3.0f, 4.0f }; - KnnFloatVectorField vectorField = new KnnFloatVectorField(FIELD_NAME_ONE, array, luceneFieldType); - RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); - Document doc = new Document(); - doc.add(vectorField); - writer.addDocument(doc); - writer.commit(); - IndexReader reader = writer.getReader(); - writer.close(); - - verify(perFieldCorrelationVectorsFormatSpy).getKnnVectorsFormatForField(eq(FIELD_NAME_ONE)); - - IndexSearcher searcher = new IndexSearcher(reader); - Query query = CorrelationQueryFactory.create( - new CorrelationQueryFactory.CreateQueryRequest("dummy", FIELD_NAME_ONE, new float[] { 1.0f, 0.0f, 0.0f }, 1, null, null) - ); - - assertEquals(1, searcher.count(query)); - - reader.close(); - dir.close(); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java deleted file mode 100644 index 674f35069a742..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.mapper; - -import org.apache.lucene.document.KnnFloatVectorField; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.search.FieldExistsQuery; -import org.opensearch.Version; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Explicit; -import org.opensearch.common.settings.IndexScopedSettings; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.ContentPath; -import org.opensearch.index.mapper.FieldMapper; -import org.opensearch.index.mapper.Mapper; -import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.ParseContext; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.QueryShardException; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.search.lookup.SearchLookup; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.mockito.Mockito; - -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Unit tests for correlation vector field mapper - */ -public class CorrelationVectorFieldMapperTests extends OpenSearchTestCase { - - private static final String CORRELATION_VECTOR_TYPE = "correlation_vector"; - private static final String DIMENSION_FIELD_NAME = 
"dimension"; - private static final String TYPE_FIELD_NAME = "type"; - - /** - * test builder construction from parse of correlation params context - * @throws IOException IOException - */ - public void testBuilder_parse_fromCorrelationParamsContext() throws IOException { - String fieldName = "test-field-name"; - String indexName = "test-index-name"; - Settings settings = Settings.builder().put(settings(Version.CURRENT).build()).build(); - - VectorFieldMapper.TypeParser typeParser = new VectorFieldMapper.TypeParser(); - - int efConstruction = 321; - int m = 12; - int dimension = 10; - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, dimension) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .startObject("parameters") - .field("m", m) - .field("ef_construction", efConstruction) - .endObject() - .endObject() - .endObject(); - - VectorFieldMapper.Builder builder = (VectorFieldMapper.Builder) typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(), - buildParserContext(indexName, settings) - ); - Mapper.BuilderContext builderContext = new Mapper.BuilderContext(settings, new ContentPath()); - builder.build(builderContext); - - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN, builder.correlationParamsContext.getValue().getSimilarityFunction()); - Assert.assertEquals(321, builder.correlationParamsContext.getValue().getParameters().get("ef_construction")); - - XContentBuilder xContentBuilderEmptyParams = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, dimension) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .endObject() - .endObject(); - - VectorFieldMapper.Builder 
builderEmptyParams = (VectorFieldMapper.Builder) typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilderEmptyParams), true, xContentBuilderEmptyParams.contentType()) - .v2(), - buildParserContext(indexName, settings) - ); - - Assert.assertEquals( - VectorSimilarityFunction.EUCLIDEAN, - builderEmptyParams.correlationParamsContext.getValue().getSimilarityFunction() - ); - Assert.assertTrue(builderEmptyParams.correlationParamsContext.getValue().getParameters().isEmpty()); - } - - /** - * test type parser construction throw error for invalid dimension of correlation vectors - * @throws IOException IOException - */ - public void testTypeParser_parse_fromCorrelationParamsContext_InvalidDimension() throws IOException { - String fieldName = "test-field-name"; - String indexName = "test-index-name"; - Settings settings = Settings.builder().put(settings(Version.CURRENT).build()).build(); - - VectorFieldMapper.TypeParser typeParser = new VectorFieldMapper.TypeParser(); - - int efConstruction = 321; - int m = 12; - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, 2000) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .startObject("parameters") - .field("m", m) - .field("ef_construction", efConstruction) - .endObject() - .endObject() - .endObject(); - - VectorFieldMapper.Builder builder = (VectorFieldMapper.Builder) typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(), - buildParserContext(indexName, settings) - ); - - expectThrows(IllegalArgumentException.class, () -> builder.build(new Mapper.BuilderContext(settings, new ContentPath()))); - } - - /** - * test type parser construction error for invalid vector similarity function - * @throws IOException IOException - */ - public 
void testTypeParser_parse_fromCorrelationParamsContext_InvalidVectorSimilarityFunction() throws IOException { - String fieldName = "test-field-name"; - String indexName = "test-index-name"; - Settings settings = Settings.builder().put(settings(Version.CURRENT).build()).build(); - - VectorFieldMapper.TypeParser typeParser = new VectorFieldMapper.TypeParser(); - - int efConstruction = 321; - int m = 12; - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, 2000) - .startObject("correlation_ctx") - .field("similarityFunction", "invalid") - .startObject("parameters") - .field("m", m) - .field("ef_construction", efConstruction) - .endObject() - .endObject() - .endObject(); - - expectThrows( - MapperParsingException.class, - () -> typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(), - buildParserContext(indexName, settings) - ) - ); - } - - /** - * test parseCreateField in CorrelationVectorFieldMapper - * @throws IOException ioexception - */ - public void testCorrelationVectorFieldMapper_parseCreateField() throws IOException { - String fieldName = "test-field-name"; - int dimension = 10; - float[] testVector = createInitializedFloatArray(dimension, 1.0f); - CorrelationParamsContext correlationParamsContext = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, Map.of()); - - VectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = new VectorFieldMapper.CorrelationVectorFieldType( - fieldName, - Map.of(), - dimension, - correlationParamsContext - ); - - CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput input = new CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput( - fieldName, - correlationVectorFieldType, - FieldMapper.MultiFields.empty(), - FieldMapper.CopyTo.empty(), - new Explicit<>(true, true), - false, - false, - 
correlationParamsContext - ); - - ParseContext.Document document = new ParseContext.Document(); - ContentPath contentPath = new ContentPath(); - ParseContext parseContext = mock(ParseContext.class); - when(parseContext.doc()).thenReturn(document); - when(parseContext.path()).thenReturn(contentPath); - - CorrelationVectorFieldMapper correlationVectorFieldMapper = Mockito.spy(new CorrelationVectorFieldMapper(input)); - doReturn(Optional.of(testVector)).when(correlationVectorFieldMapper).getFloatsFromContext(parseContext, dimension); - - correlationVectorFieldMapper.parseCreateField(parseContext, dimension); - - List fields = document.getFields(); - assertEquals(1, fields.size()); - IndexableField field = fields.get(0); - - Assert.assertTrue(field instanceof KnnFloatVectorField); - KnnFloatVectorField knnFloatVectorField = (KnnFloatVectorField) field; - Assert.assertArrayEquals(testVector, knnFloatVectorField.vectorValue(), 0.001f); - } - - /** - * test CorrelationVectorFieldType subclass - */ - public void testCorrelationVectorFieldType() { - String fieldName = "test-field-name"; - int dimension = 10; - QueryShardContext context = mock(QueryShardContext.class); - SearchLookup searchLookup = mock(SearchLookup.class); - - VectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = new VectorFieldMapper.CorrelationVectorFieldType( - fieldName, - Map.of(), - dimension - ); - Assert.assertThrows(QueryShardException.class, () -> { correlationVectorFieldType.termQuery(new Object(), context); }); - Assert.assertThrows( - UnsupportedOperationException.class, - () -> { correlationVectorFieldType.valueFetcher(context, searchLookup, ""); } - ); - Assert.assertTrue(correlationVectorFieldType.existsQuery(context) instanceof FieldExistsQuery); - Assert.assertEquals(VectorFieldMapper.CONTENT_TYPE, correlationVectorFieldType.typeName()); - } - - /** - * test constants in VectorFieldMapper - */ - public void testVectorFieldMapperConstants() { - 
Assert.assertNotNull(VectorFieldMapper.Defaults.IGNORE_MALFORMED); - Assert.assertNotNull(VectorFieldMapper.Names.IGNORE_MALFORMED); - } - - private IndexMetadata buildIndexMetaData(String index, Settings settings) { - return IndexMetadata.builder(index) - .settings(settings) - .numberOfShards(1) - .numberOfReplicas(0) - .version(7) - .mappingVersion(0) - .settingsVersion(0) - .aliasesVersion(0) - .creationDate(0) - .build(); - } - - private Mapper.TypeParser.ParserContext buildParserContext(String index, Settings settings) { - IndexSettings indexSettings = new IndexSettings( - buildIndexMetaData(index, settings), - Settings.EMPTY, - new IndexScopedSettings(Settings.EMPTY, new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS)) - ); - - MapperService mapperService = mock(MapperService.class); - when(mapperService.getIndexSettings()).thenReturn(indexSettings); - - return new Mapper.TypeParser.ParserContext( - null, - mapperService, - type -> new VectorFieldMapper.TypeParser(), - Version.CURRENT, - null, - null, - null - ); - } - - private static float[] createInitializedFloatArray(int dimension, float value) { - float[] array = new float[dimension]; - Arrays.fill(array, value); - return array; - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java deleted file mode 100644 index 3e567d0c04e53..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.query; - -import org.apache.lucene.search.KnnFloatVectorQuery; -import org.opensearch.Version; -import org.opensearch.cluster.ClusterModule; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.common.Strings; -import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; -import org.opensearch.core.common.io.stream.NamedWriteableRegistry; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.index.Index; -import org.opensearch.core.xcontent.MediaTypeRegistry; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.NumberFieldMapper; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; -import org.opensearch.plugins.SearchPlugin; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.List; -import java.util.Optional; - -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Unit tests for Correlation Query Builder - */ -public class CorrelationQueryBuilderTests extends OpenSearchTestCase { - - private static final String FIELD_NAME = "myvector"; - private static final int K = 1; - private static final TermQueryBuilder TERM_QUERY = QueryBuilders.termQuery("field", "value"); - private static final float[] QUERY_VECTOR = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; - - /** - * test invalid 
number of nearby neighbors - */ - public void testInvalidK() { - float[] queryVector = { 1.0f, 1.0f }; - - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, -K)); - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, 0)); - expectThrows( - IllegalArgumentException.class, - () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, CorrelationQueryBuilder.K_MAX + 1) - ); - } - - /** - * test empty vector scenario - */ - public void testEmptyVector() { - final float[] queryVector = null; - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, 1)); - final float[] queryVector1 = new float[] {}; - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector1, 1)); - } - - /** - * test serde with xcontent - * @throws IOException IOException - */ - public void testFromXContent() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.startObject(correlationQueryBuilder.fieldName()); - builder.field(CorrelationQueryBuilder.VECTOR_FIELD.getPreferredName(), correlationQueryBuilder.vector()); - builder.field(CorrelationQueryBuilder.K_FIELD.getPreferredName(), correlationQueryBuilder.getK()); - builder.endObject(); - builder.endObject(); - XContentParser contentParser = createParser(builder); - contentParser.nextToken(); - CorrelationQueryBuilder actualBuilder = CorrelationQueryBuilder.parse(contentParser); - Assert.assertEquals(actualBuilder, correlationQueryBuilder); - } - - /** - * test serde with xcontent - * @throws IOException IOException - */ - public void testFromXContentFromString() throws IOException { - String correlationQuery = "{\n" - + " \"myvector\" : {\n" - + " \"vector\" : [\n" - + " 1.0,\n" - + " 
2.0,\n" - + " 3.0,\n" - + " 4.0\n" - + " ],\n" - + " \"k\" : 1,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; - XContentParser contentParser = createParser(JsonXContent.jsonXContent, correlationQuery); - contentParser.nextToken(); - CorrelationQueryBuilder actualBuilder = CorrelationQueryBuilder.parse(contentParser); - Assert.assertEquals(correlationQuery.replace("\n", "").replace(" ", ""), Strings.toString(MediaTypeRegistry.JSON, actualBuilder)); - } - - /** - * test serde with xcontent with filters - * @throws IOException IOException - */ - public void testFromXContentWithFilters() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K, TERM_QUERY); - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.startObject(correlationQueryBuilder.fieldName()); - builder.field(CorrelationQueryBuilder.VECTOR_FIELD.getPreferredName(), correlationQueryBuilder.vector()); - builder.field(CorrelationQueryBuilder.K_FIELD.getPreferredName(), correlationQueryBuilder.getK()); - builder.field(CorrelationQueryBuilder.FILTER_FIELD.getPreferredName(), correlationQueryBuilder.getFilter()); - builder.endObject(); - builder.endObject(); - XContentParser contentParser = createParser(builder); - contentParser.nextToken(); - CorrelationQueryBuilder actualBuilder = CorrelationQueryBuilder.parse(contentParser); - Assert.assertEquals(actualBuilder, correlationQueryBuilder); - } - - /** - * test conversion o KnnFloatVectorQuery logic - * @throws IOException IOException - */ - public void testDoToQuery() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - VectorFieldMapper.CorrelationVectorFieldType mockCorrVectorField = mock(VectorFieldMapper.CorrelationVectorFieldType.class); - 
when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockCorrVectorField.getDimension()).thenReturn(4); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - KnnFloatVectorQuery query = (KnnFloatVectorQuery) correlationQueryBuilder.doToQuery(mockQueryShardContext); - Assert.assertEquals(FIELD_NAME, query.getField()); - Assert.assertArrayEquals(QUERY_VECTOR, query.getTargetCopy(), 0.1f); - Assert.assertEquals(K, query.getK()); - } - - /** - * test conversion o KnnFloatVectorQuery logic with filter - * @throws IOException IOException - */ - public void testDoToQueryWithFilter() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K, TERM_QUERY); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - VectorFieldMapper.CorrelationVectorFieldType mockCorrVectorField = mock(VectorFieldMapper.CorrelationVectorFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockCorrVectorField.getDimension()).thenReturn(4); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - KnnFloatVectorQuery query = (KnnFloatVectorQuery) correlationQueryBuilder.doToQuery(mockQueryShardContext); - Assert.assertEquals(FIELD_NAME, query.getField()); - Assert.assertArrayEquals(QUERY_VECTOR, query.getTargetCopy(), 0.1f); - Assert.assertEquals(K, query.getK()); - Assert.assertEquals(TERM_QUERY.toQuery(mockQueryShardContext), query.getFilter()); - } - - /** - * test conversion o KnnFloatVectorQuery logic failure with invalid dimensions - */ - public void testDoToQueryInvalidDimensions() { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - 
VectorFieldMapper.CorrelationVectorFieldType mockCorrVectorField = mock(VectorFieldMapper.CorrelationVectorFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockCorrVectorField.getDimension()).thenReturn(400); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - expectThrows(IllegalArgumentException.class, () -> correlationQueryBuilder.doToQuery(mockQueryShardContext)); - } - - /** - * test conversion o KnnFloatVectorQuery logic failure with invalid field type - */ - public void testDoToQueryInvalidFieldType() { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - NumberFieldMapper.NumberFieldType mockCorrVectorField = mock(NumberFieldMapper.NumberFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - expectThrows(IllegalArgumentException.class, () -> correlationQueryBuilder.doToQuery(mockQueryShardContext)); - } - - /** - * test serialization of Correlation Query Builder - * @throws Exception throws an IOException if serialization fails - * @throws Exception Exception - */ - public void testSerialization() throws Exception { - assertSerialization(Optional.empty()); - assertSerialization(Optional.of(TERM_QUERY)); - } - - private void assertSerialization(final Optional queryBuilderOptional) throws IOException { - final CorrelationQueryBuilder builder = queryBuilderOptional.isPresent() - ? 
new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K, queryBuilderOptional.get()) - : new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.CURRENT); - output.writeNamedWriteable(builder); - - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { - in.setVersion(Version.CURRENT); - final QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class); - - assertNotNull(deserializedQuery); - assertTrue(deserializedQuery instanceof CorrelationQueryBuilder); - final CorrelationQueryBuilder deserializedKnnQueryBuilder = (CorrelationQueryBuilder) deserializedQuery; - assertEquals(FIELD_NAME, deserializedKnnQueryBuilder.fieldName()); - assertArrayEquals(QUERY_VECTOR, (float[]) deserializedKnnQueryBuilder.vector(), 0.0f); - assertEquals(K, deserializedKnnQueryBuilder.getK()); - if (queryBuilderOptional.isPresent()) { - assertNotNull(deserializedKnnQueryBuilder.getFilter()); - assertEquals(queryBuilderOptional.get(), deserializedKnnQueryBuilder.getFilter()); - } else { - assertNull(deserializedKnnQueryBuilder.getFilter()); - } - } - } - } - - @Override - protected NamedXContentRegistry xContentRegistry() { - List list = ClusterModule.getNamedXWriteables(); - SearchPlugin.QuerySpec spec = new SearchPlugin.QuerySpec<>( - TermQueryBuilder.NAME, - TermQueryBuilder::new, - TermQueryBuilder::fromXContent - ); - list.add(new NamedXContentRegistry.Entry(QueryBuilder.class, spec.getName(), (p, c) -> spec.getParser().fromXContent(p))); - NamedXContentRegistry registry = new NamedXContentRegistry(list); - return registry; - } - - @Override - protected NamedWriteableRegistry writableRegistry() { - final List entries = ClusterModule.getNamedWriteables(); - entries.add( - new NamedWriteableRegistry.Entry( - QueryBuilder.class, - CorrelationQueryBuilder.NAME_FIELD.getPreferredName(), - CorrelationQueryBuilder::new - ) - ); - 
entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new)); - return new NamedWriteableRegistry(entries); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java deleted file mode 100644 index 45cb47b05b5c2..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.settings; - -import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.plugin.correlation.EventsCorrelationPlugin; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; -import org.junit.Before; - -import java.util.List; -import java.util.concurrent.TimeUnit; - -/** - * Unit tests for Correlation Engine settings - */ -public class EventsCorrelationSettingsTests extends OpenSearchTestCase { - - private EventsCorrelationPlugin plugin; - - @Before - public void setup() { - plugin = new EventsCorrelationPlugin(); - } - - /** - * test all plugin settings returned - */ - public void testAllPluginSettingsReturned() { - List expectedSettings = List.of( - EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING, - EventsCorrelationSettings.CORRELATION_TIME_WINDOW - ); - - List> settings = plugin.getSettings(); - Assert.assertTrue(settings.containsAll(expectedSettings)); - } - - /** - * test settings get value - */ - public void testSettingsGetValue() { - 
Settings settings = Settings.builder().put("index.correlation", true).build(); - Assert.assertEquals(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING.get(settings), true); - settings = Settings.builder() - .put("plugins.security_analytics.correlation_time_window", new TimeValue(10, TimeUnit.MINUTES)) - .build(); - Assert.assertEquals(EventsCorrelationSettings.CORRELATION_TIME_WINDOW.get(settings), new TimeValue(10, TimeUnit.MINUTES)); - } -} From 7050ecf21c8dcecc663f95a0ca68bfa91a3f9030 Mon Sep 17 00:00:00 2001 From: kkewwei Date: Fri, 20 Dec 2024 05:57:34 +0800 Subject: [PATCH 05/27] Fix Flaky Test SearchTimeoutIT.testSimpleTimeout (#16828) Signed-off-by: kkewwei --- .../java/org/opensearch/search/SearchTimeoutIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java index ef7da395d2151..79caef1f45a26 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java @@ -82,8 +82,7 @@ protected Settings nodeSettings(int nodeOrdinal) { } public void testSimpleTimeout() throws Exception { - final int numDocs = 1000; - for (int i = 0; i < numDocs; i++) { + for (int i = 0; i < 32; i++) { client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); } refresh("test"); From 45d8b0a06bb51b9392228266232ca5b9c71bb483 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 09:28:31 -0500 Subject: [PATCH 06/27] Bump com.nimbusds:oauth2-oidc-sdk from 11.19.1 to 11.20.1 in /plugins/repository-azure (#16895) * Bump com.nimbusds:oauth2-oidc-sdk in /plugins/repository-azure Bumps [com.nimbusds:oauth2-oidc-sdk](https://bitbucket.org/connect2id/oauth-2.0-sdk-with-openid-connect-extensions) from 
11.19.1 to 11.20.1. - [Changelog](https://bitbucket.org/connect2id/oauth-2.0-sdk-with-openid-connect-extensions/src/master/CHANGELOG.txt) - [Commits](https://bitbucket.org/connect2id/oauth-2.0-sdk-with-openid-connect-extensions/branches/compare/11.20.1..11.19.1) --- updated-dependencies: - dependency-name: com.nimbusds:oauth2-oidc-sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/repository-azure/build.gradle | 2 +- .../repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 | 1 - .../repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 create mode 100644 plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f439e48ecab7..c13c6c59d8a49 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.gradle.develocity` from 3.18.2 to 3.19 ([#16855](https://github.com/opensearch-project/OpenSearch/pull/16855)) - Bump `org.jline:jline` from 3.27.1 to 3.28.0 ([#16857](https://github.com/opensearch-project/OpenSearch/pull/16857)) - Bump `com.azure:azure-core` from 1.51.0 to 1.54.1 ([#16856](https://github.com/opensearch-project/OpenSearch/pull/16856)) +- Bump `com.nimbusds:oauth2-oidc-sdk` from 11.19.1 to 11.20.1 ([#16895](https://github.com/opensearch-project/OpenSearch/pull/16895)) ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks 
[#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index aebdd3e3b7f63..543b374776c95 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -62,7 +62,7 @@ dependencies { api 'com.microsoft.azure:msal4j-persistence-extension:1.3.0' api "net.java.dev.jna:jna-platform:${versions.jna}" api 'com.microsoft.azure:msal4j:1.17.2' - api 'com.nimbusds:oauth2-oidc-sdk:11.19.1' + api 'com.nimbusds:oauth2-oidc-sdk:11.20.1' api 'com.nimbusds:nimbus-jose-jwt:9.41.1' api 'com.nimbusds:content-type:2.3' api 'com.nimbusds:lang-tag:1.7' diff --git a/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 b/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 deleted file mode 100644 index 7d83b0e8ca639..0000000000000 --- a/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -58db85a807a56ae76baffa519772271ad5808195 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 b/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 new file mode 100644 index 0000000000000..7527d31eb1d37 --- /dev/null +++ b/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 @@ -0,0 +1 @@ +8d1ecd62d31945534a7cd63062c3c48ff0df9c43 \ No newline at end of file From 05ab37d931e170364af04848f85347b928200d71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 10:51:08 -0500 Subject: [PATCH 07/27] Bump com.netflix.nebula.ospackage-base from 11.10.0 to 11.10.1 in /distribution/packages (#16896) * Bump com.netflix.nebula.ospackage-base in /distribution/packages Bumps com.netflix.nebula.ospackage-base from 11.10.0 to 11.10.1. 
--- updated-dependencies: - dependency-name: com.netflix.nebula.ospackage-base dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + distribution/packages/build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c13c6c59d8a49..35f48e30aecb8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -49,6 +49,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.jline:jline` from 3.27.1 to 3.28.0 ([#16857](https://github.com/opensearch-project/OpenSearch/pull/16857)) - Bump `com.azure:azure-core` from 1.51.0 to 1.54.1 ([#16856](https://github.com/opensearch-project/OpenSearch/pull/16856)) - Bump `com.nimbusds:oauth2-oidc-sdk` from 11.19.1 to 11.20.1 ([#16895](https://github.com/opensearch-project/OpenSearch/pull/16895)) +- Bump `com.netflix.nebula.ospackage-base` from 11.10.0 to 11.10.1 ([#16896](https://github.com/opensearch-project/OpenSearch/pull/16896)) ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 25af649bb4aed..e1fa4de5a0caa 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -63,7 +63,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.10.0" + id "com.netflix.nebula.ospackage-base" version "11.10.1" } void addProcessFilesTask(String type, boolean jdk) { From 38a4112bd119d4e1d51367abe55d0fbad7a38bf8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: 
Mon, 23 Dec 2024 13:38:52 -0500 Subject: [PATCH 08/27] Bump ch.qos.logback:logback-classic from 1.5.12 to 1.5.15 in /test/fixtures/hdfs-fixture (#16898) * Bump ch.qos.logback:logback-classic in /test/fixtures/hdfs-fixture Bumps [ch.qos.logback:logback-classic](https://github.com/qos-ch/logback) from 1.5.12 to 1.5.15. - [Commits](https://github.com/qos-ch/logback/compare/v_1.5.12...v_1.5.15) --- updated-dependencies: - dependency-name: ch.qos.logback:logback-classic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 2 +- test/fixtures/hdfs-fixture/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 35f48e30aecb8..bd57d918eb390 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,7 +42,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `codecov/codecov-action` from 4 to 5 ([#16667](https://github.com/opensearch-project/OpenSearch/pull/16667)) - Bump `org.apache.logging.log4j:log4j-core` from 2.24.1 to 2.24.3 ([#16718](https://github.com/opensearch-project/OpenSearch/pull/16718), [#16858](https://github.com/opensearch-project/OpenSearch/pull/16858)) - Bump `jackson` from 2.17.2 to 2.18.2 ([#16733](https://github.com/opensearch-project/OpenSearch/pull/16733)) -- Bump `ch.qos.logback:logback-classic` from 1.2.13 to 1.5.12 ([#16716](https://github.com/opensearch-project/OpenSearch/pull/16716)) +- Bump `ch.qos.logback:logback-classic` from 1.2.13 to 1.5.15 ([#16716](https://github.com/opensearch-project/OpenSearch/pull/16716), [#16898](https://github.com/opensearch-project/OpenSearch/pull/16898)) - Bump `com.azure:azure-identity` from 1.13.2 to 1.14.2 
([#16778](https://github.com/opensearch-project/OpenSearch/pull/16778)) - Bump Apache Lucene from 9.12.0 to 9.12.1 ([#16846](https://github.com/opensearch-project/OpenSearch/pull/16846)) - Bump `com.gradle.develocity` from 3.18.2 to 3.19 ([#16855](https://github.com/opensearch-project/OpenSearch/pull/16855)) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index d645171d9f8e4..06b35a2622cf4 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -75,7 +75,7 @@ dependencies { api "org.apache.commons:commons-text:1.12.0" api "commons-net:commons-net:3.11.1" api "ch.qos.logback:logback-core:1.5.12" - api "ch.qos.logback:logback-classic:1.5.12" + api "ch.qos.logback:logback-classic:1.5.15" api "org.jboss.xnio:xnio-nio:3.8.16.Final" api 'org.jline:jline:3.28.0' api 'org.apache.commons:commons-configuration2:2.11.0' From 6b41e4f4ca5ebdf606470c517fe551ab6659d4e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 16:20:09 -0500 Subject: [PATCH 09/27] Bump lycheeverse/lychee-action from 2.1.0 to 2.2.0 (#16897) * Bump lycheeverse/lychee-action from 2.1.0 to 2.2.0 Bumps [lycheeverse/lychee-action](https://github.com/lycheeverse/lychee-action) from 2.1.0 to 2.2.0. - [Release notes](https://github.com/lycheeverse/lychee-action/releases) - [Commits](https://github.com/lycheeverse/lychee-action/compare/v2.1.0...v2.2.0) --- updated-dependencies: - dependency-name: lycheeverse/lychee-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- .github/workflows/links.yml | 2 +- CHANGELOG.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 3697750dab97a..923c82028cd1b 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v4 - name: lychee Link Checker id: lychee - uses: lycheeverse/lychee-action@v2.1.0 + uses: lycheeverse/lychee-action@v2.2.0 with: args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes fail: true diff --git a/CHANGELOG.md b/CHANGELOG.md index bd57d918eb390..44236c4bc25c0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,7 +34,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.hadoop:hadoop-minicluster` from 3.4.0 to 3.4.1 ([#16550](https://github.com/opensearch-project/OpenSearch/pull/16550)) - Bump `org.apache.xmlbeans:xmlbeans` from 5.2.1 to 5.3.0 ([#16612](https://github.com/opensearch-project/OpenSearch/pull/16612), [#16854](https://github.com/opensearch-project/OpenSearch/pull/16854)) - Bump `com.nimbusds:nimbus-jose-jwt` from 9.41.1 to 9.47 ([#16611](https://github.com/opensearch-project/OpenSearch/pull/16611), [#16807](https://github.com/opensearch-project/OpenSearch/pull/16807)) -- Bump `lycheeverse/lychee-action` from 2.0.2 to 2.1.0 ([#16610](https://github.com/opensearch-project/OpenSearch/pull/16610)) +- Bump `lycheeverse/lychee-action` from 2.0.2 to 2.2.0 ([#16610](https://github.com/opensearch-project/OpenSearch/pull/16610), [#16897](https://github.com/opensearch-project/OpenSearch/pull/16897)) - Bump `me.champeau.gradle.japicmp` from 0.4.4 to 0.4.5 
([#16614](https://github.com/opensearch-project/OpenSearch/pull/16614)) - Bump `mockito` from 5.14.1 to 5.14.2, `objenesis` from 3.2 to 3.3 and `bytebuddy` from 1.15.4 to 1.15.10 ([#16655](https://github.com/opensearch-project/OpenSearch/pull/16655)) - Bump `Netty` from 4.1.114.Final to 4.1.115.Final ([#16661](https://github.com/opensearch-project/OpenSearch/pull/16661)) From 54ae54a2fc7b2131285e0405186600ea5fa4b220 Mon Sep 17 00:00:00 2001 From: Finn Date: Tue, 24 Dec 2024 06:29:30 -0800 Subject: [PATCH 10/27] Create sub directories for ThirdPartyAudit dependency metadata (#16844) * Extract jars to sub dirs during thirdPartyAudit task. Signed-off-by: Finn Carroll * Change regex to split on '-'/'.'. Ignore version. Signed-off-by: Finn Carroll * Split on .jar for sub folder prefix. Signed-off-by: Finn Carroll --------- Signed-off-by: Finn Carroll --- .../gradle/precommit/ThirdPartyAuditTask.java | 33 ++++++++++++++----- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java index a74781ac44720..6842f0e541abe 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java @@ -229,8 +229,7 @@ public Set getJarsToScan() { @TaskAction public void runThirdPartyAudit() throws IOException { Set jars = getJarsToScan(); - - extractJars(jars); + Set extractedJars = extractJars(jars); final String forbiddenApisOutput = runForbiddenAPIsCli(); @@ -248,7 +247,7 @@ public void runThirdPartyAudit() throws IOException { Set jdkJarHellClasses = null; if (this.jarHellEnabled) { - jdkJarHellClasses = runJdkJarHellCheck(); + jdkJarHellClasses = runJdkJarHellCheck(extractedJars); } if (missingClassExcludes != null) { @@ -301,16 +300,26 @@ private void logForbiddenAPIsOutput(String forbiddenApisOutput) { 
getLogger().error("Forbidden APIs output:\n{}==end of forbidden APIs==", forbiddenApisOutput); } - private void extractJars(Set jars) { + /** + * Extract project jars to build directory as specified by getJarExpandDir. + * Handle multi release jars by keeping versions closest to `targetCompatibility` version. + * @param jars to extract to build dir + * @return File set of extracted jars + */ + private Set extractJars(Set jars) { + Set extractedJars = new TreeSet<>(); File jarExpandDir = getJarExpandDir(); // We need to clean up to make sure old dependencies don't linger getProject().delete(jarExpandDir); jars.forEach(jar -> { + String jarPrefix = jar.getName().replace(".jar", ""); + File jarSubDir = new File(jarExpandDir, jarPrefix); + extractedJars.add(jarSubDir); FileTree jarFiles = getProject().zipTree(jar); getProject().copy(spec -> { spec.from(jarFiles); - spec.into(jarExpandDir); + spec.into(jarSubDir); // exclude classes from multi release jars spec.exclude("META-INF/versions/**"); }); @@ -329,7 +338,7 @@ private void extractJars(Set jars) { Integer.parseInt(targetCompatibility.get().getMajorVersion()) ).forEach(majorVersion -> getProject().copy(spec -> { spec.from(getProject().zipTree(jar)); - spec.into(jarExpandDir); + spec.into(jarSubDir); String metaInfPrefix = "META-INF/versions/" + majorVersion; spec.include(metaInfPrefix + "/**"); // Drop the version specific prefix @@ -337,6 +346,8 @@ private void extractJars(Set jars) { spec.setIncludeEmptyDirs(false); })); }); + + return extractedJars; } private void assertNoJarHell(Set jdkJarHellClasses) { @@ -398,7 +409,12 @@ private String runForbiddenAPIsCli() throws IOException { return forbiddenApisOutput; } - private Set runJdkJarHellCheck() throws IOException { + /** + * Execute java with JDK_JAR_HELL_MAIN_CLASS against provided jars with OpenSearch core in the classpath. + * @param jars to scan for jarHell violations. + * @return standard out of jarHell process. 
+ */ + private Set runJdkJarHellCheck(Set jars) throws IOException { ByteArrayOutputStream standardOut = new ByteArrayOutputStream(); InjectedExecOps execOps = getProject().getObjects().newInstance(InjectedExecOps.class); ExecResult execResult = execOps.getExecOps().javaexec(spec -> { @@ -407,9 +423,8 @@ private Set runJdkJarHellCheck() throws IOException { getRuntimeConfiguration(), getProject().getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME) ); - spec.getMainClass().set(JDK_JAR_HELL_MAIN_CLASS); - spec.args(getJarExpandDir()); + spec.args(jars); spec.setIgnoreExitValue(true); if (javaHome != null) { spec.setExecutable(javaHome + "/bin/java"); From 8ea0c80e90caee8fdbe161c9483ea7a6467b1b63 Mon Sep 17 00:00:00 2001 From: kkewwei Date: Sat, 28 Dec 2024 07:29:45 +0800 Subject: [PATCH 11/27] Retrieve value from DocValues in a flat_object filed (#16802) --- CHANGELOG.md | 1 + .../92_flat_object_support_doc_values.yml | 52 ++++++++++++++- .../search/fields/SearchFieldsIT.java | 22 +++---- .../index/mapper/DocValueFetcher.java | 6 +- .../index/mapper/FlatObjectFieldMapper.java | 63 ++++++++++++++++++- .../mapper/FlatObjectFieldMapperTests.java | 22 +++++++ 6 files changed, 150 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 44236c4bc25c0..a503728a44fda 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Added a precaution to handle extreme date values during sorting to prevent `arithmetic_exception: long overflow` ([#16812](https://github.com/opensearch-project/OpenSearch/pull/16812)). 
- Add search replica stats to segment replication stats API ([#16678](https://github.com/opensearch-project/OpenSearch/pull/16678)) - Introduce a setting to disable download of full cluster state from remote on term mismatch([#16798](https://github.com/opensearch-project/OpenSearch/pull/16798/)) +- Added ability to retrieve value from DocValues in a flat_object filed([#16802](https://github.com/opensearch-project/OpenSearch/pull/16802)) ### Dependencies - Bump `com.google.cloud:google-cloud-core-http` from 2.23.0 to 2.47.0 ([#16504](https://github.com/opensearch-project/OpenSearch/pull/16504)) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml index 9ec39660a4928..c840276ee1157 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml @@ -1,8 +1,9 @@ --- # The test setup includes: -# - Create flat_object mapping for flat_object_doc_values_test index -# - Index 9 example documents -# - Search tests about doc_values and index +# - 1.Create flat_object mapping for flat_object_doc_values_test index +# - 2.Index 9 example documents +# - 3.Search tests about doc_values and index +# - 4.Fetch doc_value from flat_object field setup: - skip: @@ -786,3 +787,48 @@ teardown: - length: { hits.hits: 1 } - match: { hits.hits.0._source.order: "order8" } + + # Stored Fields with exact dot path. 
+ - do: + search: + body: { + _source: false, + query: { + bool: { + must: [ + { + term: { + order: "order0" + } + } + ] + } + }, + stored_fields: "_none_", + docvalue_fields: [ "issue.labels.name","order" ] + } + + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields: { "order" : [ "order0" ], "issue.labels.name": [ "abc0" ] } } + + - do: + search: + body: { + _source: false, + query: { + bool: { + must: [ + { + term: { + order: "order0" + } + } + ] + } + }, + stored_fields: "_none_", + docvalue_fields: [ "issue.labels.name" ] + } + + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields: { "issue.labels.name": [ "abc0" ] } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index 2ce96092203e8..60a6e59014e11 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -1023,7 +1023,7 @@ public void testDocValueFields() throws Exception { .startObject("ip_field") .field("type", "ip") .endObject() - .startObject("flat_object_field") + .startObject("flat_object_field1") .field("type", "flat_object") .endObject() .endObject() @@ -1050,9 +1050,11 @@ public void testDocValueFields() throws Exception { .field("boolean_field", true) .field("binary_field", new byte[] { 42, 100 }) .field("ip_field", "::1") - .field("flat_object_field") + .field("flat_object_field1") .startObject() + .field("fooa", "bara") .field("foo", "bar") + .field("foob", "barb") .endObject() .endObject() ) @@ -1075,7 +1077,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("boolean_field") .addDocValueField("binary_field") .addDocValueField("ip_field") - .addDocValueField("flat_object_field"); + .addDocValueField("flat_object_field1.foo"); SearchResponse searchResponse = builder.get(); 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -1097,7 +1099,7 @@ public void testDocValueFields() throws Exception { "keyword_field", "binary_field", "ip_field", - "flat_object_field" + "flat_object_field1.foo" ) ) ); @@ -1116,7 +1118,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ")); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); - assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field").getValue(), equalTo("flat_object_field.foo")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field1.foo").getValue(), equalTo("bar")); builder = client().prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"); searchResponse = builder.get(); @@ -1139,8 +1141,7 @@ public void testDocValueFields() throws Exception { "text_field", "keyword_field", "binary_field", - "ip_field", - "flat_object_field" + "ip_field" ) ) ); @@ -1160,7 +1161,6 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ")); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); - assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field").getValue(), equalTo("flat_object_field.foo")); builder = client().prepareSearch() .setQuery(matchAllQuery()) @@ -1176,7 +1176,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("boolean_field", "use_field_mapping") .addDocValueField("binary_field", "use_field_mapping") .addDocValueField("ip_field", 
"use_field_mapping") - .addDocValueField("flat_object_field", "use_field_mapping"); + .addDocValueField("flat_object_field1.foo", null); ; searchResponse = builder.get(); @@ -1199,7 +1199,7 @@ public void testDocValueFields() throws Exception { "keyword_field", "binary_field", "ip_field", - "flat_object_field" + "flat_object_field1.foo" ) ) ); @@ -1219,7 +1219,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ")); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); - assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field").getValue(), equalTo("flat_object_field.foo")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field1.foo").getValue(), equalTo("bar")); builder = client().prepareSearch() .setQuery(matchAllQuery()) diff --git a/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java b/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java index 827792cdb1091..48da9b30ac1b0 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java @@ -43,6 +43,7 @@ import java.util.List; import static java.util.Collections.emptyList; +import static org.opensearch.index.mapper.FlatObjectFieldMapper.DOC_VALUE_NO_MATCH; /** * Value fetcher that loads from doc values. 
@@ -70,7 +71,10 @@ public List fetchValues(SourceLookup lookup) throws IOException { } List result = new ArrayList(leaf.docValueCount()); for (int i = 0, count = leaf.docValueCount(); i < count; ++i) { - result.add(leaf.nextValue()); + Object value = leaf.nextValue(); + if (value != DOC_VALUE_NO_MATCH) { + result.add(value); + } } return result; } diff --git a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java index 0ccdb40f9d33a..13063a4761006 100644 --- a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java @@ -28,6 +28,7 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.JsonToStringXContentParser; import org.opensearch.core.common.ParsingException; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; @@ -36,11 +37,13 @@ import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.opensearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.support.CoreValuesSourceType; import org.opensearch.search.lookup.SearchLookup; import java.io.IOException; import java.io.UncheckedIOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -63,6 +66,7 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper { public static final String CONTENT_TYPE = "flat_object"; + public static final Object DOC_VALUE_NO_MATCH = new Object(); /** * In flat_object field mapper, field type is similar to keyword field type @@ -272,7 
+276,7 @@ NamedAnalyzer normalizer() { @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { failIfNoDocValues(); - return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES); + return new SortedSetOrdinalsIndexFieldData.Builder(valueFieldType().name(), CoreValuesSourceType.BYTES); } @Override @@ -304,6 +308,30 @@ protected String parseSourceValue(Object value) { }; } + @Override + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); + } + if (timeZone != null) { + throw new IllegalArgumentException( + "Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones" + ); + } + if (mappedFieldTypeName != null) { + return new FlatObjectDocValueFormat(mappedFieldTypeName + DOT_SYMBOL + name() + EQUAL_SYMBOL); + } else { + throw new IllegalArgumentException( + "Field [" + name() + "] of type [" + typeName() + "] does not support doc_value in root field" + ); + } + } + + @Override + public boolean isAggregatable() { + return false; + } + @Override public Object valueForDisplay(Object value) { if (value == null) { @@ -530,6 +558,39 @@ public Query wildcardQuery( return valueFieldType().wildcardQuery(rewriteValue(value), method, caseInsensitve, context); } + /** + * A doc_value formatter for flat_object field. 
+ */ + public class FlatObjectDocValueFormat implements DocValueFormat { + private static final String NAME = "flat_object"; + private final String prefix; + + public FlatObjectDocValueFormat(String prefix) { + this.prefix = prefix; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) {} + + @Override + public Object format(BytesRef value) { + String parsedValue = inputToString(value); + if (parsedValue.startsWith(prefix) == false) { + return DOC_VALUE_NO_MATCH; + } + return parsedValue.substring(prefix.length()); + } + + @Override + public BytesRef parseBytesRef(String value) { + return new BytesRef((String) valueFieldType.rewriteForDocValue(rewriteValue(value))); + } + } } private final ValueFieldMapper valueFieldMapper; diff --git a/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java index afd9e994ce3ae..7e6aa00c87290 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java @@ -21,6 +21,7 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.DocValueFormat; import java.io.IOException; @@ -397,6 +398,27 @@ public void testDeduplicationValue() throws IOException { assertEquals(new BytesRef("field.labels=3"), fieldValueAndPaths[4].binaryValue()); } + public void testFetchDocValues() throws IOException { + MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "flat_object"))); + { + // test valueWithPathField + MappedFieldType ft = mapperService.fieldType("field.name"); + DocValueFormat format = ft.docValueFormat(null, null); + String storedValue = "field.field.name=1234"; + + Object object = 
format.format(new BytesRef(storedValue)); + assertEquals("1234", object); + } + + { + // test valueField + MappedFieldType ft = mapperService.fieldType("field"); + Throwable throwable = assertThrows(IllegalArgumentException.class, () -> ft.docValueFormat(null, null)); + assertEquals("Field [field] of type [flat_object] does not support doc_value in root field", throwable.getMessage()); + } + + } + @Override protected void registerParameters(ParameterChecker checker) throws IOException { // In the future we will want to make sure parameter updates are covered. From 1352bbf0e50a590e02759a41e000e656a2c7203c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Dec 2024 10:14:30 -0500 Subject: [PATCH 12/27] Bump com.microsoft.azure:msal4j from 1.17.2 to 1.18.0 in /plugins/repository-azure (#16918) * Bump com.microsoft.azure:msal4j in /plugins/repository-azure Bumps [com.microsoft.azure:msal4j](https://github.com/AzureAD/microsoft-authentication-library-for-java) from 1.17.2 to 1.18.0. - [Release notes](https://github.com/AzureAD/microsoft-authentication-library-for-java/releases) - [Changelog](https://github.com/AzureAD/microsoft-authentication-library-for-java/blob/dev/changelog.txt) - [Commits](https://github.com/AzureAD/microsoft-authentication-library-for-java/compare/v1.17.2...v1.18.0) --- updated-dependencies: - dependency-name: com.microsoft.azure:msal4j dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 | 1 - plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 create mode 100644 plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index a503728a44fda..3f0eed5f5bbc8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -51,6 +51,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.azure:azure-core` from 1.51.0 to 1.54.1 ([#16856](https://github.com/opensearch-project/OpenSearch/pull/16856)) - Bump `com.nimbusds:oauth2-oidc-sdk` from 11.19.1 to 11.20.1 ([#16895](https://github.com/opensearch-project/OpenSearch/pull/16895)) - Bump `com.netflix.nebula.ospackage-base` from 11.10.0 to 11.10.1 ([#16896](https://github.com/opensearch-project/OpenSearch/pull/16896)) +- Bump `com.microsoft.azure:msal4j` from 1.17.2 to 1.18.0 ([#16918](https://github.com/opensearch-project/OpenSearch/pull/16918)) ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 543b374776c95..03ea07623dbaf 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -61,7 +61,7 @@ dependencies { // Start of transitive dependencies for azure-identity api 'com.microsoft.azure:msal4j-persistence-extension:1.3.0' api 
"net.java.dev.jna:jna-platform:${versions.jna}" - api 'com.microsoft.azure:msal4j:1.17.2' + api 'com.microsoft.azure:msal4j:1.18.0' api 'com.nimbusds:oauth2-oidc-sdk:11.20.1' api 'com.nimbusds:nimbus-jose-jwt:9.41.1' api 'com.nimbusds:content-type:2.3' diff --git a/plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 b/plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 deleted file mode 100644 index b5219ee17e9fa..0000000000000 --- a/plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6211e3d71d0388929babaa0ff0951b30d001852 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 b/plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 new file mode 100644 index 0000000000000..292259e9d862d --- /dev/null +++ b/plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 @@ -0,0 +1 @@ +a47e4e9257a5d9cdb8282c331278492968e06250 \ No newline at end of file From 7ae66d0d63308b009927647ce0a892231f1fad29 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Dec 2024 14:20:44 -0500 Subject: [PATCH 13/27] Bump org.apache.commons:commons-text from 1.12.0 to 1.13.0 in /test/fixtures/hdfs-fixture (#16919) * Bump org.apache.commons:commons-text in /test/fixtures/hdfs-fixture Bumps org.apache.commons:commons-text from 1.12.0 to 1.13.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-text dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + test/fixtures/hdfs-fixture/build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3f0eed5f5bbc8..e9e57a09704b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -52,6 +52,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.nimbusds:oauth2-oidc-sdk` from 11.19.1 to 11.20.1 ([#16895](https://github.com/opensearch-project/OpenSearch/pull/16895)) - Bump `com.netflix.nebula.ospackage-base` from 11.10.0 to 11.10.1 ([#16896](https://github.com/opensearch-project/OpenSearch/pull/16896)) - Bump `com.microsoft.azure:msal4j` from 1.17.2 to 1.18.0 ([#16918](https://github.com/opensearch-project/OpenSearch/pull/16918)) +- Bump `org.apache.commons:commons-text` from 1.12.0 to 1.13.0 ([#16919](https://github.com/opensearch-project/OpenSearch/pull/16919)) ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 06b35a2622cf4..02aab575bbaf0 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -72,7 +72,7 @@ dependencies { api "org.eclipse.jetty:jetty-server:${versions.jetty}" api "org.eclipse.jetty.websocket:javax-websocket-server-impl:${versions.jetty}" api 'org.apache.zookeeper:zookeeper:3.9.3' - api "org.apache.commons:commons-text:1.12.0" + api "org.apache.commons:commons-text:1.13.0" api "commons-net:commons-net:3.11.1" api "ch.qos.logback:logback-core:1.5.12" api "ch.qos.logback:logback-classic:1.5.15" From 7a0e8fbed7116173e11f52236c347affd4e22a16 Mon Sep 17 00:00:00 2001 
From: Finn Date: Tue, 31 Dec 2024 10:00:58 -0800 Subject: [PATCH 14/27] Add gRPC server as transport-grpc plugin (#16534) Introduce auxiliary transport to NetworkPlugin and add gRPC plugin. Auxiliary transports are optional lifecycle components provided by network plugins which run in parallel to the http server/native transport. They are distinct from the existing NetworkPlugin interfaces of 'getTransports' and 'getHttpTransports' as auxiliary transports are optional. Each AuxTransport implements it's own 'aux.transport.type' and 'aux.transport..ports' setting. Since Security.java initializes previous to Node.java during bootstrap socket binding permissions are granted based on 'aux.transport..ports' for each enabled 'aux.transport.type', falling back to a default if no ports are specified. Signed-off-by: Finn Carroll --- CHANGELOG.md | 1 + gradle/libs.versions.toml | 2 +- .../licenses/grpc-api-1.68.0.jar.sha1 | 1 - .../licenses/grpc-api-1.68.2.jar.sha1 | 1 + .../licenses/grpc-api-1.68.0.jar.sha1 | 1 - .../licenses/grpc-api-1.68.2.jar.sha1 | 1 + plugins/transport-grpc/build.gradle | 168 +++++++++++ .../error_prone_annotations-2.24.1.jar.sha1 | 1 + .../error_prone_annotations-LICENSE.txt | 202 +++++++++++++ .../error_prone_annotations-NOTICE.txt | 0 .../licenses/failureaccess-1.0.1.jar.sha1 | 1 + .../licenses/failureaccess-LICENSE.txt | 202 +++++++++++++ .../licenses/failureaccess-NOTICE.txt | 0 .../transport-grpc/licenses/grpc-LICENSE.txt | 202 +++++++++++++ .../transport-grpc/licenses/grpc-NOTICE.txt | 62 ++++ .../licenses/grpc-api-1.68.2.jar.sha1 | 1 + .../licenses/grpc-core-1.68.2.jar.sha1 | 1 + .../grpc-netty-shaded-1.68.2.jar.sha1 | 1 + .../licenses/grpc-protobuf-1.68.2.jar.sha1 | 1 + .../grpc-protobuf-lite-1.68.2.jar.sha1 | 1 + .../licenses/grpc-services-1.68.2.jar.sha1 | 1 + .../licenses/grpc-stub-1.68.2.jar.sha1 | 1 + .../licenses/grpc-util-1.68.2.jar.sha1 | 1 + .../licenses/guava-33.2.1-jre.jar.sha1 | 1 + .../transport-grpc/licenses/guava-LICENSE.txt | 
202 +++++++++++++ .../transport-grpc/licenses/guava-NOTICE.txt | 0 .../licenses/perfmark-api-0.26.0.jar.sha1 | 1 + .../licenses/perfmark-api-LICENSE.txt | 201 +++++++++++++ .../licenses/perfmark-api-NOTICE.txt | 40 +++ .../opensearch/transport/grpc/GrpcPlugin.java | 69 +++++ .../grpc/Netty4GrpcServerTransport.java | 277 ++++++++++++++++++ .../transport/grpc/package-info.java | 13 + .../plugin-metadata/plugin-security.policy | 18 ++ .../grpc/Netty4GrpcServerTransportTests.java | 49 ++++ .../org/opensearch/bootstrap/Security.java | 29 ++ .../common/network/NetworkModule.java | 42 +++ .../common/settings/ClusterSettings.java | 2 + .../http/AbstractHttpServerTransport.java | 63 ++-- .../main/java/org/opensearch/node/Node.java | 7 + .../org/opensearch/plugins/NetworkPlugin.java | 51 ++++ .../opensearch/transport/TcpTransport.java | 38 +-- .../org/opensearch/transport/Transport.java | 50 ++++ .../AbstractHttpServerTransportTests.java | 38 +-- .../transport/PublishPortTests.java | 38 +-- 44 files changed, 1958 insertions(+), 124 deletions(-) delete mode 100644 plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/build.gradle create mode 100644 plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt create mode 100644 plugins/transport-grpc/licenses/error_prone_annotations-NOTICE.txt create mode 100644 plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/failureaccess-LICENSE.txt create mode 100644 plugins/transport-grpc/licenses/failureaccess-NOTICE.txt create mode 100644 plugins/transport-grpc/licenses/grpc-LICENSE.txt create mode 100644 
plugins/transport-grpc/licenses/grpc-NOTICE.txt create mode 100644 plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/guava-LICENSE.txt create mode 100644 plugins/transport-grpc/licenses/guava-NOTICE.txt create mode 100644 plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 create mode 100644 plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt create mode 100644 plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt create mode 100644 plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java create mode 100644 plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java create mode 100644 plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java create mode 100644 plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy create mode 100644 plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index e9e57a09704b0..a7f99722dd584 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add search replica stats to segment replication stats API 
([#16678](https://github.com/opensearch-project/OpenSearch/pull/16678)) - Introduce a setting to disable download of full cluster state from remote on term mismatch([#16798](https://github.com/opensearch-project/OpenSearch/pull/16798/)) - Added ability to retrieve value from DocValues in a flat_object filed([#16802](https://github.com/opensearch-project/OpenSearch/pull/16802)) +- Introduce framework for auxiliary transports and an experimental gRPC transport plugin ([#16534](https://github.com/opensearch-project/OpenSearch/pull/16534)) ### Dependencies - Bump `com.google.cloud:google-cloud-core-http` from 2.23.0 to 2.47.0 ([#16504](https://github.com/opensearch-project/OpenSearch/pull/16504)) diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index ff0920f9d6057..f357fb248520c 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -27,7 +27,7 @@ google_http_client = "1.44.1" google_auth = "1.29.0" tdigest = "3.3" hdrhistogram = "2.2.2" -grpc = "1.68.0" +grpc = "1.68.2" # when updating the JNA version, also update the version in buildSrc/build.gradle jna = "5.13.0" diff --git a/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 deleted file mode 100644 index bf45716c5b8ce..0000000000000 --- a/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a9f25c58d8d5b0fcf37ae889a50fec87e34ac08 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..1844172dec982 --- /dev/null +++ b/plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a257a5dd25dda1c97a99b56d5b9c1e56c12ae554 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 deleted file mode 100644 index bf45716c5b8ce..0000000000000 --- 
a/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a9f25c58d8d5b0fcf37ae889a50fec87e34ac08 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 b/plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..1844172dec982 --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a257a5dd25dda1c97a99b56d5b9c1e56c12ae554 \ No newline at end of file diff --git a/plugins/transport-grpc/build.gradle b/plugins/transport-grpc/build.gradle new file mode 100644 index 0000000000000..47f62b2b8c3f3 --- /dev/null +++ b/plugins/transport-grpc/build.gradle @@ -0,0 +1,168 @@ +import org.gradle.api.attributes.java.TargetJvmEnvironment + +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +opensearchplugin { + description 'gRPC based transport implementation' + classname 'org.opensearch.transport.grpc.GrpcPlugin' +} + +dependencies { + compileOnly "com.google.code.findbugs:jsr305:3.0.2" + runtimeOnly "com.google.guava:guava:${versions.guava}" + implementation "com.google.errorprone:error_prone_annotations:2.24.1" + implementation "com.google.guava:failureaccess:1.0.1" + implementation "io.grpc:grpc-api:${versions.grpc}" + implementation "io.grpc:grpc-core:${versions.grpc}" + implementation "io.grpc:grpc-netty-shaded:${versions.grpc}" + implementation "io.grpc:grpc-protobuf-lite:${versions.grpc}" + implementation "io.grpc:grpc-protobuf:${versions.grpc}" + implementation "io.grpc:grpc-services:${versions.grpc}" + implementation "io.grpc:grpc-stub:${versions.grpc}" + implementation "io.grpc:grpc-util:${versions.grpc}" + implementation "io.perfmark:perfmark-api:0.26.0" +} + +tasks.named("dependencyLicenses").configure { + mapping from: /grpc-.*/, to: 'grpc' +} + 
+thirdPartyAudit { + ignoreMissingClasses( + 'com.aayushatharva.brotli4j.Brotli4jLoader', + 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Status', + 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Wrapper', + 'com.aayushatharva.brotli4j.encoder.BrotliEncoderChannel', + 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', + 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', + // classes are missing + + // from io.netty.logging.CommonsLoggerFactory (netty) + 'org.apache.commons.logging.Log', + 'org.apache.commons.logging.LogFactory', + + // from Log4j (deliberate, Netty will fallback to Log4j 2) + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + + // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) + 'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + 'org.bouncycastle.openssl.PEMEncryptedKeyPair', + 'org.bouncycastle.openssl.PEMParser', + 'org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter', + 'org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8DecryptorProviderBuilder', + 'org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder', + 'org.bouncycastle.pkcs.PKCS8EncryptedPrivateKeyInfo', + + // from io.netty.handler.ssl.JettyNpnSslEngine (netty) + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + + // from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) + 'org.jboss.marshalling.ByteInput', + + // from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) + 'org.jboss.marshalling.ByteOutput', + + // from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) + 'org.jboss.marshalling.Marshaller', + + // from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) + 'org.jboss.marshalling.MarshallerFactory', + 
'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + + // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.helpers.FormattingTuple', + 'org.slf4j.helpers.MessageFormatter', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', + + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonToken', + 'com.google.protobuf.util.Durations', + 'com.google.protobuf.util.Timestamps', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', + 'com.google.protobuf.nano.MessageNano', + 'com.google.rpc.Status', + 'com.google.rpc.Status$Builder', + 'com.ning.compress.BufferRecycler', + 'com.ning.compress.lzf.ChunkDecoder', + 'com.ning.compress.lzf.ChunkEncoder', + 'com.ning.compress.lzf.LZFChunk', + 'com.ning.compress.lzf.LZFEncoder', + 'com.ning.compress.lzf.util.ChunkDecoderFactory', + 'com.ning.compress.lzf.util.ChunkEncoderFactory', + 'lzma.sdk.lzma.Encoder', + 'net.jpountz.lz4.LZ4Compressor', + 'net.jpountz.lz4.LZ4Factory', + 'net.jpountz.lz4.LZ4FastDecompressor', + 'net.jpountz.xxhash.XXHash32', + 'net.jpountz.xxhash.XXHashFactory', + 'org.eclipse.jetty.alpn.ALPN$ClientProvider', + 'org.eclipse.jetty.alpn.ALPN$ServerProvider', + 'org.eclipse.jetty.alpn.ALPN', + + 'org.conscrypt.AllocatedBuffer', + 'org.conscrypt.BufferAllocator', + 'org.conscrypt.Conscrypt', + 'org.conscrypt.HandshakeListener', + + 'reactor.blockhound.BlockHound$Builder', + 'reactor.blockhound.integration.BlockHoundIntegration' + ) + + ignoreViolations( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$1', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$2', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$3', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$4', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$5', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$1', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$2', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$3', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$4', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$6', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', + 
'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueConsumerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueProducerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.util.UnsafeLongArrayAccess', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess' + ) +} diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 b/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 new file mode 100644 index 0000000000000..67723f6f51248 --- /dev/null +++ b/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 @@ -0,0 +1 @@ +32b299e45105aa9b0df8279c74dc1edfcf313ff0 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt b/plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-NOTICE.txt b/plugins/transport-grpc/licenses/error_prone_annotations-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 b/plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 new file mode 100644 index 0000000000000..4798b37e20691 --- /dev/null +++ b/plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 @@ -0,0 +1 @@ +1dcf1de382a0bf95a3d8b0849546c88bac1292c9 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/failureaccess-LICENSE.txt b/plugins/transport-grpc/licenses/failureaccess-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/failureaccess-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/failureaccess-NOTICE.txt b/plugins/transport-grpc/licenses/failureaccess-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-grpc/licenses/grpc-LICENSE.txt b/plugins/transport-grpc/licenses/grpc-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/grpc-NOTICE.txt b/plugins/transport-grpc/licenses/grpc-NOTICE.txt new file mode 100644 index 0000000000000..f70c5620cf75a --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-NOTICE.txt @@ -0,0 +1,62 @@ +Copyright 2014 The gRPC Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +----------------------------------------------------------------------- + +This product contains a modified portion of 'OkHttp', an open source +HTTP & SPDY client for Android and Java applications, which can be obtained +at: + + * LICENSE: + * okhttp/third_party/okhttp/LICENSE (Apache License 2.0) + * HOMEPAGE: + * https://github.com/square/okhttp + * LOCATION_IN_GRPC: + * okhttp/third_party/okhttp + +This product contains a modified portion of 'Envoy', an open source +cloud-native high-performance edge/middle/service proxy, which can be +obtained at: + + * LICENSE: + * xds/third_party/envoy/LICENSE (Apache License 2.0) + * NOTICE: + * xds/third_party/envoy/NOTICE + * HOMEPAGE: + * https://www.envoyproxy.io + * LOCATION_IN_GRPC: + * xds/third_party/envoy + +This product contains a modified portion of 'protoc-gen-validate (PGV)', +an open source protoc plugin to generate polyglot message validators, +which can be obtained at: + + * LICENSE: + * xds/third_party/protoc-gen-validate/LICENSE (Apache License 2.0) + * NOTICE: + * xds/third_party/protoc-gen-validate/NOTICE + * HOMEPAGE: + * https://github.com/envoyproxy/protoc-gen-validate + * LOCATION_IN_GRPC: + * xds/third_party/protoc-gen-validate + +This product contains a modified portion of 'udpa', +an open source universal data plane API, which can be obtained at: + + * LICENSE: + * xds/third_party/udpa/LICENSE (Apache License 2.0) + * HOMEPAGE: + * https://github.com/cncf/udpa + * LOCATION_IN_GRPC: + * xds/third_party/udpa diff --git a/plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..1844172dec982 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a257a5dd25dda1c97a99b56d5b9c1e56c12ae554 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 new file mode 100644 
index 0000000000000..e20345d29e914 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 @@ -0,0 +1 @@ +b0fd51a1c029785d1c9ae2cfc80a296b60dfcfdb \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..53fa705a66129 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 @@ -0,0 +1 @@ +8ea4186fbdcc5432664364ed53e03cf0d458c3ec \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..e861b41837f33 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 @@ -0,0 +1 @@ +35b28e0d57874021cd31e76dd4a795f76a82471e \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..b2401f9752829 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a53064b896adcfefe74362a33e111492351dfc03 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..c4edf923791e5 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 @@ -0,0 +1 @@ +6c2a0b0640544b9010a42bcf76f2791116a75c9d \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..118464f8f48ff --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 @@ -0,0 +1 @@ +d58ee1cf723b4b5536d44b67e328c163580a8d98 \ No 
newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..c3261b012e502 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 @@ -0,0 +1 @@ +2d195570e9256d1357d584146a8e6b19587d4044 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 b/plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 new file mode 100644 index 0000000000000..27d5304e326df --- /dev/null +++ b/plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 @@ -0,0 +1 @@ +818e780da2c66c63bbb6480fef1f3855eeafa3e4 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/guava-LICENSE.txt b/plugins/transport-grpc/licenses/guava-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/guava-NOTICE.txt b/plugins/transport-grpc/licenses/guava-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 b/plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 new file mode 100644 index 0000000000000..abf1becd13298 --- /dev/null +++ b/plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 @@ -0,0 +1 @@ +ef65452adaf20bf7d12ef55913aba24037b82738 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt b/plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt b/plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt new file mode 100644 index 0000000000000..7d74b6569cf64 --- /dev/null +++ b/plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt @@ -0,0 +1,40 @@ +Copyright 2019 Google LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +----------------------------------------------------------------------- + +This product contains a modified portion of 'Catapult', an open source +Trace Event viewer for Chrome, Linux, and Android applications, which can +be obtained at: + + * LICENSE: + * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) + * HOMEPAGE: + * https://github.com/catapult-project/catapult + +This product contains a modified portion of 'Polymer', a library for Web +Components, which can be obtained at: + * LICENSE: + * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/polymer/LICENSE (New BSD License) + * HOMEPAGE: + * https://github.com/Polymer/polymer + + +This product contains a modified portion of 'ASM', an open source +Java Bytecode library, which can be obtained at: + + * LICENSE: + * agent/src/main/resources/io/perfmark/agent/third_party/asm/LICENSE (BSD style License) + * HOMEPAGE: + * https://asm.ow2.io/ diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java new file mode 100644 index 0000000000000..0a464e135350b --- /dev/null +++ b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java @@ -0,0 +1,69 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ +package org.opensearch.transport.grpc; + +import org.opensearch.common.network.NetworkService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.plugins.NetworkPlugin; +import org.opensearch.plugins.Plugin; +import org.opensearch.telemetry.tracing.Tracer; +import org.opensearch.threadpool.ThreadPool; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.GRPC_TRANSPORT_SETTING_KEY; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_BIND_HOST; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_HOST; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_PORTS; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_PUBLISH_HOST; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_PUBLISH_PORT; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_WORKER_COUNT; + +/** + * Main class for the gRPC plugin. + */ +public final class GrpcPlugin extends Plugin implements NetworkPlugin { + + /** + * Creates a new GrpcPlugin instance. 
+ */ + public GrpcPlugin() {} + + @Override + public Map> getAuxTransports( + Settings settings, + ThreadPool threadPool, + CircuitBreakerService circuitBreakerService, + NetworkService networkService, + ClusterSettings clusterSettings, + Tracer tracer + ) { + return Collections.singletonMap( + GRPC_TRANSPORT_SETTING_KEY, + () -> new Netty4GrpcServerTransport(settings, Collections.emptyList(), networkService) + ); + } + + @Override + public List> getSettings() { + return List.of( + SETTING_GRPC_PORTS, + SETTING_GRPC_HOST, + SETTING_GRPC_PUBLISH_HOST, + SETTING_GRPC_BIND_HOST, + SETTING_GRPC_WORKER_COUNT, + SETTING_GRPC_PUBLISH_PORT + ); + } +} diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java new file mode 100644 index 0000000000000..61c0722772b92 --- /dev/null +++ b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java @@ -0,0 +1,277 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.transport.grpc; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.network.NetworkService; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.transport.PortsRange; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; +import org.opensearch.plugins.NetworkPlugin; +import org.opensearch.transport.BindTransportException; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +import io.grpc.BindableService; +import io.grpc.InsecureServerCredentials; +import io.grpc.Server; +import io.grpc.netty.shaded.io.grpc.netty.NettyServerBuilder; +import io.grpc.netty.shaded.io.netty.channel.EventLoopGroup; +import io.grpc.netty.shaded.io.netty.channel.nio.NioEventLoopGroup; +import io.grpc.netty.shaded.io.netty.channel.socket.nio.NioServerSocketChannel; +import io.grpc.protobuf.services.HealthStatusManager; +import io.grpc.protobuf.services.ProtoReflectionService; + +import static java.util.Collections.emptyList; +import static org.opensearch.common.settings.Setting.intSetting; +import static org.opensearch.common.settings.Setting.listSetting; +import static org.opensearch.common.util.concurrent.OpenSearchExecutors.daemonThreadFactory; +import static org.opensearch.transport.Transport.resolveTransportPublishPort; + +/** + * Netty4 gRPC server implemented as a LifecycleComponent. 
+ * Services injected through BindableService list. + */ +public class Netty4GrpcServerTransport extends NetworkPlugin.AuxTransport { + private static final Logger logger = LogManager.getLogger(Netty4GrpcServerTransport.class); + + /** + * Type key for configuring settings of this auxiliary transport. + */ + public static final String GRPC_TRANSPORT_SETTING_KEY = "experimental-transport-grpc"; + + /** + * Port range on which to bind. + * Note this setting is configured through AffixSetting AUX_TRANSPORT_PORTS where the aux transport type matches the GRPC_TRANSPORT_SETTING_KEY. + */ + public static final Setting SETTING_GRPC_PORTS = AUX_TRANSPORT_PORTS.getConcreteSettingForNamespace( + GRPC_TRANSPORT_SETTING_KEY + ); + + /** + * Port published to peers for this server. + */ + public static final Setting SETTING_GRPC_PUBLISH_PORT = intSetting("grpc.publish_port", -1, -1, Setting.Property.NodeScope); + + /** + * Host list to bind and publish. + * For distinct bind/publish hosts configure SETTING_GRPC_BIND_HOST + SETTING_GRPC_PUBLISH_HOST separately. + */ + public static final Setting> SETTING_GRPC_HOST = listSetting( + "grpc.host", + emptyList(), + Function.identity(), + Setting.Property.NodeScope + ); + + /** + * Host list to bind. + */ + public static final Setting> SETTING_GRPC_BIND_HOST = listSetting( + "grpc.bind_host", + SETTING_GRPC_HOST, + Function.identity(), + Setting.Property.NodeScope + ); + + /** + * Host list published to peers. + */ + public static final Setting> SETTING_GRPC_PUBLISH_HOST = listSetting( + "grpc.publish_host", + SETTING_GRPC_HOST, + Function.identity(), + Setting.Property.NodeScope + ); + + /** + * Configure size of thread pool backing this transport server. 
+ */ + public static final Setting SETTING_GRPC_WORKER_COUNT = new Setting<>( + "grpc.netty.worker_count", + (s) -> Integer.toString(OpenSearchExecutors.allocatedProcessors(s)), + (s) -> Setting.parseInt(s, 1, "grpc.netty.worker_count"), + Setting.Property.NodeScope + ); + + private final Settings settings; + private final NetworkService networkService; + private final List services; + private final CopyOnWriteArrayList servers = new CopyOnWriteArrayList<>(); + private final String[] bindHosts; + private final String[] publishHosts; + private final PortsRange port; + private final int nettyEventLoopThreads; + + private volatile BoundTransportAddress boundAddress; + private volatile EventLoopGroup eventLoopGroup; + + /** + * Creates a new Netty4GrpcServerTransport instance. + * @param settings the configured settings. + * @param services the gRPC compatible services to be registered with the server. + * @param networkService the bind/publish addresses. + */ + public Netty4GrpcServerTransport(Settings settings, List services, NetworkService networkService) { + this.settings = Objects.requireNonNull(settings); + this.services = Objects.requireNonNull(services); + this.networkService = Objects.requireNonNull(networkService); + + final List httpBindHost = SETTING_GRPC_BIND_HOST.get(settings); + this.bindHosts = (httpBindHost.isEmpty() ? NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(settings) : httpBindHost).toArray( + Strings.EMPTY_ARRAY + ); + + final List httpPublishHost = SETTING_GRPC_PUBLISH_HOST.get(settings); + this.publishHosts = (httpPublishHost.isEmpty() ? 
NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings) : httpPublishHost) + .toArray(Strings.EMPTY_ARRAY); + + this.port = SETTING_GRPC_PORTS.get(settings); + this.nettyEventLoopThreads = SETTING_GRPC_WORKER_COUNT.get(settings); + } + + BoundTransportAddress boundAddress() { + return this.boundAddress; + } + + @Override + protected void doStart() { + boolean success = false; + try { + this.eventLoopGroup = new NioEventLoopGroup(nettyEventLoopThreads, daemonThreadFactory(settings, "grpc_event_loop")); + bindServer(); + success = true; + logger.info("Started gRPC server on port {}", port); + } finally { + if (!success) { + doStop(); + } + } + } + + @Override + protected void doStop() { + for (Server server : servers) { + if (server != null) { + server.shutdown(); + try { + server.awaitTermination(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warn("Interrupted while shutting down gRPC server"); + } finally { + server.shutdownNow(); + } + } + } + if (eventLoopGroup != null) { + try { + eventLoopGroup.shutdownGracefully(0, 10, TimeUnit.SECONDS).await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warn("Failed to shut down event loop group"); + } + } + } + + @Override + protected void doClose() { + + } + + private void bindServer() { + InetAddress[] hostAddresses; + try { + hostAddresses = networkService.resolveBindHostAddresses(bindHosts); + } catch (IOException e) { + throw new BindTransportException("Failed to resolve host [" + Arrays.toString(bindHosts) + "]", e); + } + + List boundAddresses = new ArrayList<>(hostAddresses.length); + for (InetAddress address : hostAddresses) { + boundAddresses.add(bindAddress(address, port)); + } + + final InetAddress publishInetAddress; + try { + publishInetAddress = networkService.resolvePublishHostAddresses(publishHosts); + } catch (Exception e) { + throw new BindTransportException("Failed to resolve publish address", e); 
+ } + + final int publishPort = resolveTransportPublishPort(SETTING_GRPC_PUBLISH_PORT.get(settings), boundAddresses, publishInetAddress); + if (publishPort < 0) { + throw new BindTransportException( + "Failed to auto-resolve grpc publish port, multiple bound addresses " + + boundAddresses + + " with distinct ports and none of them matched the publish address (" + + publishInetAddress + + "). " + + "Please specify a unique port by setting " + + SETTING_GRPC_PORTS.getKey() + + " or " + + SETTING_GRPC_PUBLISH_PORT.getKey() + ); + } + + TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); + this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), publishAddress); + logger.info("{}", boundAddress); + } + + private TransportAddress bindAddress(InetAddress hostAddress, PortsRange portRange) { + AtomicReference lastException = new AtomicReference<>(); + AtomicReference addr = new AtomicReference<>(); + + boolean success = portRange.iterate(portNumber -> { + try { + + final InetSocketAddress address = new InetSocketAddress(hostAddress, portNumber); + final NettyServerBuilder serverBuilder = NettyServerBuilder.forAddress(address, InsecureServerCredentials.create()) + .bossEventLoopGroup(eventLoopGroup) + .workerEventLoopGroup(eventLoopGroup) + .channelType(NioServerSocketChannel.class) + .addService(new HealthStatusManager().getHealthService()) + .addService(ProtoReflectionService.newInstance()); + + services.forEach(serverBuilder::addService); + + Server srv = serverBuilder.build().start(); + servers.add(srv); + addr.set(new TransportAddress(hostAddress, portNumber)); + logger.debug("Bound gRPC to address {{}}", address); + return true; + } catch (Exception e) { + lastException.set(e); + return false; + } + }); + + if (!success) { + throw new RuntimeException("Failed to bind to " + hostAddress + " on ports " + portRange, lastException.get()); + } + + return addr.get(); + } +} 
diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java new file mode 100644 index 0000000000000..4a5d9d02b5b91 --- /dev/null +++ b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java @@ -0,0 +1,13 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * gRPC transport implementation for OpenSearch. + * Provides network communication using the gRPC protocol. + */ +package org.opensearch.transport.grpc; diff --git a/plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy b/plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..398de576b6c5a --- /dev/null +++ b/plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +grant codeBase "${codebase.grpc-netty-shaded}" { + // for reading the system-wide configuration for the backlog of established sockets + permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read"; + + // netty makes and accepts socket connections + permission java.net.SocketPermission "*", "accept,connect"; + + // Netty sets custom classloader for some of its internal threads + permission java.lang.RuntimePermission "*", "setContextClassLoader"; +}; diff --git a/plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java b/plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java new file mode 100644 index 0000000000000..ebeff62c2c23c --- /dev/null +++ b/plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.transport.grpc; + +import org.opensearch.common.network.NetworkService; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; +import org.hamcrest.MatcherAssert; +import org.junit.Before; + +import java.util.List; + +import io.grpc.BindableService; + +import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.not; + +public class Netty4GrpcServerTransportTests extends OpenSearchTestCase { + + private NetworkService networkService; + private List services; + + @Before + public void setup() { + networkService = new NetworkService(List.of()); + services = List.of(); + } + + public void test() { + try (Netty4GrpcServerTransport transport = new Netty4GrpcServerTransport(createSettings(), services, networkService)) { + transport.start(); + + MatcherAssert.assertThat(transport.boundAddress().boundAddresses(), not(emptyArray())); + assertNotNull(transport.boundAddress().publishAddress().address()); + + transport.stop(); + } + } + + private static Settings createSettings() { + return Settings.builder().put(Netty4GrpcServerTransport.SETTING_GRPC_PORTS.getKey(), getPortRange()).build(); + } +} diff --git a/server/src/main/java/org/opensearch/bootstrap/Security.java b/server/src/main/java/org/opensearch/bootstrap/Security.java index 53b1d990f9a0c..9f1dcbe8fb587 100644 --- a/server/src/main/java/org/opensearch/bootstrap/Security.java +++ b/server/src/main/java/org/opensearch/bootstrap/Security.java @@ -35,7 +35,9 @@ import org.opensearch.cli.Command; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.io.PathUtils; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; +import org.opensearch.common.transport.PortsRange; import org.opensearch.env.Environment; import org.opensearch.http.HttpTransportSettings; import org.opensearch.plugins.PluginInfo; @@ -71,6 +73,9 @@ import static 
org.opensearch.bootstrap.FilePermissionUtils.addDirectoryPath; import static org.opensearch.bootstrap.FilePermissionUtils.addSingleFilePath; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_PORT_DEFAULTS; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_PORTS; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_SETTING; /** * Initializes SecurityManager with necessary permissions. @@ -402,6 +407,7 @@ static void addFilePermissions(Permissions policy, Environment environment) thro private static void addBindPermissions(Permissions policy, Settings settings) { addSocketPermissionForHttp(policy, settings); addSocketPermissionForTransportProfiles(policy, settings); + addSocketPermissionForAux(policy, settings); } /** @@ -416,6 +422,29 @@ private static void addSocketPermissionForHttp(final Permissions policy, final S addSocketPermissionForPortRange(policy, httpRange); } + /** + * Add dynamic {@link SocketPermission} based on AffixSetting AUX_TRANSPORT_PORTS. + * If an auxiliary transport type is enabled but has no corresponding port range setting fall back to AUX_PORT_DEFAULTS. + * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission}s to. 
+ * @param settings the {@link Settings} instance to read the gRPC settings from + */ + private static void addSocketPermissionForAux(final Permissions policy, final Settings settings) { + Set portsRanges = new HashSet<>(); + for (String auxType : AUX_TRANSPORT_TYPES_SETTING.get(settings)) { + Setting auxTypePortSettings = AUX_TRANSPORT_PORTS.getConcreteSettingForNamespace(auxType); + if (auxTypePortSettings.exists(settings)) { + portsRanges.add(auxTypePortSettings.get(settings)); + } else { + portsRanges.add(new PortsRange(AUX_PORT_DEFAULTS)); + } + } + + for (PortsRange portRange : portsRanges) { + addSocketPermissionForPortRange(policy, portRange.getPortRangeString()); + } + } + /** * Add dynamic {@link SocketPermission} based on transport settings. This method will first check if there is a port range specified in * the transport profile specified by {@code profileSettings} and will fall back to {@code settings}. diff --git a/server/src/main/java/org/opensearch/common/network/NetworkModule.java b/server/src/main/java/org/opensearch/common/network/NetworkModule.java index bb8da190a6f35..5d55fb52c323d 100644 --- a/server/src/main/java/org/opensearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/opensearch/common/network/NetworkModule.java @@ -80,6 +80,9 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_KEY; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_SETTING; + /** * A module to handle registering and binding all network related classes. 
* @@ -157,6 +160,8 @@ public final class NetworkModule { private final Map> transportFactories = new HashMap<>(); private final Map> transportHttpFactories = new HashMap<>(); + private final Map> transportAuxFactories = new HashMap<>(); + private final List transportInterceptors = new ArrayList<>(); /** @@ -222,6 +227,18 @@ public NetworkModule( registerHttpTransport(entry.getKey(), entry.getValue()); } + Map> auxTransportFactory = plugin.getAuxTransports( + settings, + threadPool, + circuitBreakerService, + networkService, + clusterSettings, + tracer + ); + for (Map.Entry> entry : auxTransportFactory.entrySet()) { + registerAuxTransport(entry.getKey(), entry.getValue()); + } + Map> transportFactory = plugin.getTransports( settings, threadPool, @@ -305,6 +322,12 @@ private void registerHttpTransport(String key, Supplier fac } } + private void registerAuxTransport(String key, Supplier factory) { + if (transportAuxFactories.putIfAbsent(key, factory) != null) { + throw new IllegalArgumentException("transport for name: " + key + " is already registered"); + } + } + /** * Register an allocation command. *

@@ -346,6 +369,25 @@ public Supplier getHttpServerTransportSupplier() { return factory; } + /** + * Optional client/server transports that run in parallel to HttpServerTransport. + * Multiple transport types can be registered and enabled via AUX_TRANSPORT_TYPES_SETTING. + * An IllegalStateException is thrown if a transport type is enabled but not registered. + */ + public List getAuxServerTransportList() { + List serverTransportSuppliers = new ArrayList<>(); + + for (String transportType : AUX_TRANSPORT_TYPES_SETTING.get(settings)) { + final Supplier factory = transportAuxFactories.get(transportType); + if (factory == null) { + throw new IllegalStateException("Unsupported " + AUX_TRANSPORT_TYPES_KEY + " [" + transportType + "]"); + } + serverTransportSuppliers.add(factory.get()); + } + + return serverTransportSuppliers; + } + public Supplier getTransportSupplier() { final String name; if (TRANSPORT_TYPE_SETTING.exists(settings)) { diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 007a68b8eaa29..c27efa080ac4e 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -149,6 +149,7 @@ import org.opensearch.node.resource.tracker.ResourceTrackerSettings; import org.opensearch.persistent.PersistentTasksClusterService; import org.opensearch.persistent.decider.EnableAssignmentDecider; +import org.opensearch.plugins.NetworkPlugin; import org.opensearch.plugins.PluginsService; import org.opensearch.ratelimitting.admissioncontrol.AdmissionControlSettings; import org.opensearch.ratelimitting.admissioncontrol.settings.CpuBasedAdmissionControllerSettings; @@ -362,6 +363,7 @@ public void apply(Settings value, Settings current, Settings previous) { NetworkModule.TRANSPORT_SSL_DUAL_MODE_ENABLED, NetworkModule.TRANSPORT_SSL_ENFORCE_HOSTNAME_VERIFICATION, 
NetworkModule.TRANSPORT_SSL_ENFORCE_HOSTNAME_VERIFICATION_RESOLVE_HOST_NAME, + NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_SETTING, HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS, HttpTransportSettings.SETTING_CORS_ENABLED, HttpTransportSettings.SETTING_CORS_MAX_AGE, diff --git a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java index 991fbf12072be..7f78ae0b9d2ff 100644 --- a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java @@ -62,6 +62,7 @@ import org.opensearch.telemetry.tracing.channels.TraceableRestChannel; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.BindTransportException; +import org.opensearch.transport.Transport; import java.io.IOException; import java.net.InetAddress; @@ -71,7 +72,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -192,7 +192,25 @@ protected void bindServer() { throw new BindTransportException("Failed to resolve publish address", e); } - final int publishPort = resolvePublishPort(settings, boundAddresses, publishInetAddress); + final int publishPort = Transport.resolveTransportPublishPort( + SETTING_HTTP_PUBLISH_PORT.get(settings), + boundAddresses, + publishInetAddress + ); + if (publishPort < 0) { + throw new BindHttpException( + "Failed to auto-resolve http publish port, multiple bound addresses " + + boundAddresses + + " with distinct ports and none of them matched the publish address (" + + publishInetAddress + + "). 
" + + "Please specify a unique port by setting " + + SETTING_HTTP_PORT.getKey() + + " or " + + SETTING_HTTP_PUBLISH_PORT.getKey() + ); + } + TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), publishAddress); logger.info("{}", boundAddress); @@ -258,47 +276,6 @@ protected void doClose() {} */ protected abstract void stopInternal(); - // package private for tests - static int resolvePublishPort(Settings settings, List boundAddresses, InetAddress publishInetAddress) { - int publishPort = SETTING_HTTP_PUBLISH_PORT.get(settings); - - if (publishPort < 0) { - for (TransportAddress boundAddress : boundAddresses) { - InetAddress boundInetAddress = boundAddress.address().getAddress(); - if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { - publishPort = boundAddress.getPort(); - break; - } - } - } - - // if no matching boundAddress found, check if there is a unique port for all bound addresses - if (publishPort < 0) { - final Set ports = new HashSet<>(); - for (TransportAddress boundAddress : boundAddresses) { - ports.add(boundAddress.getPort()); - } - if (ports.size() == 1) { - publishPort = ports.iterator().next(); - } - } - - if (publishPort < 0) { - throw new BindHttpException( - "Failed to auto-resolve http publish port, multiple bound addresses " - + boundAddresses - + " with distinct ports and none of them matched the publish address (" - + publishInetAddress - + "). 
" - + "Please specify a unique port by setting " - + SETTING_HTTP_PORT.getKey() - + " or " - + SETTING_HTTP_PUBLISH_PORT.getKey() - ); - } - return publishPort; - } - public void onException(HttpChannel channel, Exception e) { channel.handleException(e); if (lifecycle.started() == false) { diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index c78ee6711dcda..704a23890b07a 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -1216,6 +1216,9 @@ protected Node( SearchExecutionStatsCollector.makeWrapper(responseCollectorService) ); final HttpServerTransport httpServerTransport = newHttpTransport(networkModule); + + pluginComponents.addAll(newAuxTransports(networkModule)); + final IndexingPressureService indexingPressureService = new IndexingPressureService(settings, clusterService); // Going forward, IndexingPressureService will have required constructs for exposing listeners/interfaces for plugin // development. Then we can deprecate Getter and Setter for IndexingPressureService in ClusterService (#478). 
@@ -2113,6 +2116,10 @@ protected HttpServerTransport newHttpTransport(NetworkModule networkModule) { return networkModule.getHttpServerTransportSupplier().get(); } + protected List newAuxTransports(NetworkModule networkModule) { + return networkModule.getAuxServerTransportList(); + } + private static class LocalNodeFactory implements Function { private final SetOnce localNode = new SetOnce<>(); private final String persistentNodeId; diff --git a/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java b/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java index 138ef6f71280d..516aa94534f94 100644 --- a/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java @@ -31,9 +31,13 @@ package org.opensearch.plugins; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; +import org.opensearch.common.transport.PortsRange; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; @@ -49,8 +53,12 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.function.Supplier; +import static java.util.Collections.emptyList; +import static org.opensearch.common.settings.Setting.affixKeySetting; + /** * Plugin for extending network and transport related classes * @@ -58,6 +66,49 @@ */ public interface NetworkPlugin { + /** + * Auxiliary transports are lifecycle components with an associated port range. + * These pluggable client/server transport implementations have their lifecycle managed by Node. 
+ * + * Auxiliary transports are additionally defined by a port range on which they bind. Opening permissions on these + * ports is awkward as {@link org.opensearch.bootstrap.Security} is configured previous to Node initialization during + * bootstrap. To allow pluggable AuxTransports access to configurable port ranges we require the port range be provided + * through an {@link org.opensearch.common.settings.Setting.AffixSetting} of the form 'AUX_SETTINGS_PREFIX.{aux-transport-key}.ports'. + */ + abstract class AuxTransport extends AbstractLifecycleComponent { + public static final String AUX_SETTINGS_PREFIX = "aux.transport."; + public static final String AUX_TRANSPORT_TYPES_KEY = AUX_SETTINGS_PREFIX + "types"; + public static final String AUX_PORT_DEFAULTS = "9400-9500"; + public static final Setting.AffixSetting AUX_TRANSPORT_PORTS = affixKeySetting( + AUX_SETTINGS_PREFIX, + "ports", + key -> new Setting<>(key, AUX_PORT_DEFAULTS, PortsRange::new, Setting.Property.NodeScope) + ); + + public static final Setting> AUX_TRANSPORT_TYPES_SETTING = Setting.listSetting( + AUX_TRANSPORT_TYPES_KEY, + emptyList(), + Function.identity(), + Setting.Property.NodeScope + ); + } + + /** + * Auxiliary transports are optional and run in parallel to the default HttpServerTransport. + * Returns a map of AuxTransport suppliers. + */ + @ExperimentalApi + default Map> getAuxTransports( + Settings settings, + ThreadPool threadPool, + CircuitBreakerService circuitBreakerService, + NetworkService networkService, + ClusterSettings clusterSettings, + Tracer tracer + ) { + return Collections.emptyMap(); + } + /** * Returns a list of {@link TransportInterceptor} instances that are used to intercept incoming and outgoing * transport (inter-node) requests. 
This must not return null diff --git a/server/src/main/java/org/opensearch/transport/TcpTransport.java b/server/src/main/java/org/opensearch/transport/TcpTransport.java index f56cd146ce953..f80a29872a78d 100644 --- a/server/src/main/java/org/opensearch/transport/TcpTransport.java +++ b/server/src/main/java/org/opensearch/transport/TcpTransport.java @@ -521,38 +521,8 @@ private BoundTransportAddress createBoundTransportAddress(ProfileSettings profil throw new BindTransportException("Failed to resolve publish address", e); } - final int publishPort = resolvePublishPort(profileSettings, boundAddresses, publishInetAddress); - final TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); - return new BoundTransportAddress(transportBoundAddresses, publishAddress); - } - - // package private for tests - static int resolvePublishPort(ProfileSettings profileSettings, List boundAddresses, InetAddress publishInetAddress) { - int publishPort = profileSettings.publishPort; - - // if port not explicitly provided, search for port of address in boundAddresses that matches publishInetAddress - if (publishPort < 0) { - for (InetSocketAddress boundAddress : boundAddresses) { - InetAddress boundInetAddress = boundAddress.getAddress(); - if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { - publishPort = boundAddress.getPort(); - break; - } - } - } - - // if no matching boundAddress found, check if there is a unique port for all bound addresses - if (publishPort < 0) { - final Set ports = new HashSet<>(); - for (InetSocketAddress boundAddress : boundAddresses) { - ports.add(boundAddress.getPort()); - } - if (ports.size() == 1) { - publishPort = ports.iterator().next(); - } - } - - if (publishPort < 0) { + final int publishPort = Transport.resolvePublishPort(profileSettings.publishPort, boundAddresses, publishInetAddress); + if (publishPort == -1) { String profileExplanation = 
profileSettings.isDefaultProfile ? "" : " for profile " + profileSettings.profileName; throw new BindTransportException( "Failed to auto-resolve publish port" @@ -568,7 +538,9 @@ static int resolvePublishPort(ProfileSettings profileSettings, List boundAddresses, InetAddress publishInetAddress) { + if (publishPort < 0) { + for (InetSocketAddress boundAddress : boundAddresses) { + InetAddress boundInetAddress = boundAddress.getAddress(); + if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { + publishPort = boundAddress.getPort(); + break; + } + } + } + + if (publishPort < 0) { + final Set ports = new HashSet<>(); + for (InetSocketAddress boundAddress : boundAddresses) { + ports.add(boundAddress.getPort()); + } + if (ports.size() == 1) { + publishPort = ports.iterator().next(); + } + } + + return publishPort; + } + + static int resolveTransportPublishPort(int publishPort, List boundAddresses, InetAddress publishInetAddress) { + return Transport.resolvePublishPort( + publishPort, + boundAddresses.stream().map(TransportAddress::address).collect(Collectors.toList()), + publishInetAddress + ); + } + /** * A unidirectional connection to a {@link DiscoveryNode} * diff --git a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java index c34f13041cb11..a4295289c3109 100644 --- a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java @@ -59,6 +59,7 @@ import org.opensearch.test.rest.FakeRestRequest; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.Transport; import org.junit.After; import org.junit.Before; @@ -70,8 +71,6 @@ import static java.net.InetAddress.getByName; import static java.util.Arrays.asList; -import static 
org.opensearch.http.AbstractHttpServerTransport.resolvePublishPort; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class AbstractHttpServerTransportTests extends OpenSearchTestCase { @@ -101,47 +100,40 @@ public void testHttpPublishPort() throws Exception { int boundPort = randomIntBetween(9000, 9100); int otherBoundPort = randomIntBetween(9200, 9300); - int publishPort = resolvePublishPort( - Settings.builder().put(HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT.getKey(), 9080).build(), - randomAddresses(), - getByName("127.0.0.2") - ); + int publishPort = Transport.resolveTransportPublishPort(9080, randomAddresses(), getByName("127.0.0.2")); assertThat("Publish port should be explicitly set to 9080", publishPort, equalTo(9080)); - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from matched address", publishPort, equalTo(boundPort)); - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("127.0.0.1", boundPort), address("127.0.0.2", boundPort)), getByName("127.0.0.3") ); assertThat("Publish port should be derived from unique port of bound addresses", publishPort, equalTo(boundPort)); - final BindHttpException e = expectThrows( - BindHttpException.class, - () -> resolvePublishPort( - Settings.EMPTY, - asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), - getByName("127.0.0.3") - ) + publishPort = Transport.resolveTransportPublishPort( + -1, + asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("127.0.0.3") ); - assertThat(e.getMessage(), containsString("Failed to auto-resolve http publish port")); + assertThat(publishPort, equalTo(-1)); - publishPort = 
resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from matching wildcard address", publishPort, equalTo(boundPort)); if (NetworkUtils.SUPPORTS_V6) { - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("::1") ); diff --git a/server/src/test/java/org/opensearch/transport/PublishPortTests.java b/server/src/test/java/org/opensearch/transport/PublishPortTests.java index 6a41409f6f181..2e5a57c4cdd60 100644 --- a/server/src/test/java/org/opensearch/transport/PublishPortTests.java +++ b/server/src/test/java/org/opensearch/transport/PublishPortTests.java @@ -43,8 +43,6 @@ import static java.net.InetAddress.getByName; import static java.util.Arrays.asList; -import static org.opensearch.transport.TcpTransport.resolvePublishPort; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class PublishPortTests extends OpenSearchTestCase { @@ -73,48 +71,44 @@ public void testPublishPort() throws Exception { } - int publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(settings, profile), + int publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(settings, profile).publishPort, randomAddresses(), getByName("127.0.0.2") ); assertThat("Publish port should be explicitly set", publishPort, equalTo(useProfile ? 
9080 : 9081)); - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from matched address", publishPort, equalTo(boundPort)); - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("127.0.0.1", boundPort), address("127.0.0.2", boundPort)), getByName("127.0.0.3") ); assertThat("Publish port should be derived from unique port of bound addresses", publishPort, equalTo(boundPort)); - try { - resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), - asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), - getByName("127.0.0.3") - ); - fail("Expected BindTransportException as publish_port not specified and non-unique port of bound addresses"); - } catch (BindTransportException e) { - assertThat(e.getMessage(), containsString("Failed to auto-resolve publish port")); - } + int resPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, + asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("127.0.0.3") + ); + assertThat("as publish_port not specified and non-unique port of bound addresses", resPort, equalTo(-1)); - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from 
matching wildcard address", publishPort, equalTo(boundPort)); if (NetworkUtils.SUPPORTS_V6) { - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("::1") ); From ab546ae7989b522f96020c75334014495b5149ab Mon Sep 17 00:00:00 2001 From: gaobinlong Date: Thu, 2 Jan 2025 23:24:08 +0800 Subject: [PATCH 15/27] Update script supports java.lang.String.sha1() and java.lang.String.sha256() methods (#16923) * Update script supports java.lang.String.sha1() and java.lang.String.sha256() methods Signed-off-by: Gao Binlong * Modify change log Signed-off-by: Gao Binlong --------- Signed-off-by: Gao Binlong --- CHANGELOG.md | 1 + .../painless/PainlessModulePlugin.java | 6 +++ .../painless/spi/org.opensearch.update.txt | 14 +++++++ .../rest-api-spec/test/painless/15_update.yml | 36 ++++++++++++++++++ .../test/reindex/85_scripting.yml | 38 +++++++++++++++++++ .../test/update_by_query/80_scripting.yml | 35 +++++++++++++++++ 6 files changed, 130 insertions(+) create mode 100644 modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index a7f99722dd584..45bc56b505eb3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Support prefix list for remote repository attributes([#16271](https://github.com/opensearch-project/OpenSearch/pull/16271)) - Add new configuration setting `synonym_analyzer`, to the `synonym` and `synonym_graph` filters, enabling the specification of a custom analyzer for reading the synonym file ([#16488](https://github.com/opensearch-project/OpenSearch/pull/16488)). 
- Add stats for remote publication failure and move download failure stats to remote methods([#16682](https://github.com/opensearch-project/OpenSearch/pull/16682/)) +- Update script supports java.lang.String.sha1() and java.lang.String.sha256() methods ([#16923](https://github.com/opensearch-project/OpenSearch/pull/16923)) - Added a precaution to handle extreme date values during sorting to prevent `arithmetic_exception: long overflow` ([#16812](https://github.com/opensearch-project/OpenSearch/pull/16812)). - Add search replica stats to segment replication stats API ([#16678](https://github.com/opensearch-project/OpenSearch/pull/16678)) - Introduce a setting to disable download of full cluster state from remote on term mismatch([#16798](https://github.com/opensearch-project/OpenSearch/pull/16798/)) diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java index 55dc23f665d2e..b3f6f7d0730fd 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java @@ -66,6 +66,7 @@ import org.opensearch.script.ScriptContext; import org.opensearch.script.ScriptEngine; import org.opensearch.script.ScriptService; +import org.opensearch.script.UpdateScript; import org.opensearch.search.aggregations.pipeline.MovingFunctionScript; import org.opensearch.threadpool.ThreadPool; import org.opensearch.watcher.ResourceWatcherService; @@ -109,6 +110,11 @@ public final class PainlessModulePlugin extends Plugin implements ScriptPlugin, ingest.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.ingest.txt")); map.put(IngestScript.CONTEXT, ingest); + // Functions available to update scripts + List update = new ArrayList<>(Allowlist.BASE_ALLOWLISTS); + update.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, 
"org.opensearch.update.txt")); + map.put(UpdateScript.CONTEXT, update); + // Functions available to derived fields List derived = new ArrayList<>(Allowlist.BASE_ALLOWLISTS); derived.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.derived.txt")); diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt new file mode 100644 index 0000000000000..144614b3862b0 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt @@ -0,0 +1,14 @@ +# +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# + +# This file contains an allowlist for the update scripts + +class java.lang.String { + String org.opensearch.painless.api.Augmentation sha1() + String org.opensearch.painless.api.Augmentation sha256() +} diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml index cb118ed9d562f..e0f3068810ed8 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml @@ -123,3 +123,39 @@ - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.type: "illegal_argument_exception" } - match: { error.reason: "Iterable object is self-referencing itself" } + +# update script supports java.lang.String.sha1() and java.lang.String.sha256() methods +# related issue: https://github.com/opensearch-project/OpenSearch/issues/16423 +--- +"Update script supports sha1() and sha256() method for strings": + - skip: + version: " - 2.18.99" + 
reason: "introduced in 2.19.0" + - do: + index: + index: test_1 + id: 1 + body: + foo: bar + + - do: + update: + index: test_1 + id: 1 + body: + script: + lang: painless + source: "ctx._source.foo_sha1 = ctx._source.foo.sha1();ctx._source.foo_sha256 = ctx._source.foo.sha256();" + + - match: { _index: test_1 } + - match: { _id: "1" } + - match: { _version: 2 } + + - do: + get: + index: test_1 + id: 1 + + - match: { _source.foo: bar } + - match: { _source.foo_sha1: "62cdb7020ff920e5aa642c3d4066950dd1f01f4d" } + - match: { _source.foo_sha256: "fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9" } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml index 9c38b13bb1ff0..5c218aa00ca4f 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml @@ -440,3 +440,41 @@ lang: painless source: syntax errors are fun! 
- match: {error.reason: 'compile error'} + +# script in reindex supports java.lang.String.sha1() and java.lang.String.sha256() methods +# related issue: https://github.com/opensearch-project/OpenSearch/issues/16423 +--- +"Script supports sha1() and sha256() method for strings": + - skip: + version: " - 2.18.99" + reason: "introduced in 2.19.0" + - do: + index: + index: twitter + id: 1 + body: { "user": "foobar" } + - do: + indices.refresh: {} + + - do: + reindex: + refresh: true + body: + source: + index: twitter + dest: + index: new_twitter + script: + lang: painless + source: ctx._source.user_sha1 = ctx._source.user.sha1();ctx._source.user_sha256 = ctx._source.user.sha256() + - match: {created: 1} + - match: {noops: 0} + + - do: + get: + index: new_twitter + id: 1 + + - match: { _source.user: foobar } + - match: { _source.user_sha1: "8843d7f92416211de9ebb963ff4ce28125932878" } + - match: { _source.user_sha256: "c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2" } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml index a8de49d812677..b52b1428e08bb 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml @@ -432,3 +432,38 @@ lang: painless source: syntax errors are fun! 
- match: {error.reason: 'compile error'} + +# script in update_by_query supports java.lang.String.sha1() and java.lang.String.sha256() methods +# related issue: https://github.com/opensearch-project/OpenSearch/issues/16423 +--- +"Script supports sha1() and sha256() method for strings": + - skip: + version: " - 2.18.99" + reason: "introduced in 2.19.0" + - do: + index: + index: twitter + id: 1 + body: { "user": "foobar" } + - do: + indices.refresh: {} + + - do: + update_by_query: + index: twitter + refresh: true + body: + script: + lang: painless + source: ctx._source.user_sha1 = ctx._source.user.sha1();ctx._source.user_sha256 = ctx._source.user.sha256() + - match: {updated: 1} + - match: {noops: 0} + + - do: + get: + index: twitter + id: 1 + + - match: { _source.user: foobar } + - match: { _source.user_sha1: "8843d7f92416211de9ebb963ff4ce28125932878" } + - match: { _source.user_sha256: "c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2" } From 542b5510cdba4ec23ed2d8c96f089f53007edb05 Mon Sep 17 00:00:00 2001 From: Prudhvi Godithi Date: Thu, 2 Jan 2025 12:11:46 -0800 Subject: [PATCH 16/27] Workflow benchmark-pull-request.yml fix (#16925) Signed-off-by: Prudhvi Godithi --- .github/workflows/benchmark-pull-request.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/benchmark-pull-request.yml b/.github/workflows/benchmark-pull-request.yml index c494df6e27ce3..e6ccc31160bf9 100644 --- a/.github/workflows/benchmark-pull-request.yml +++ b/.github/workflows/benchmark-pull-request.yml @@ -4,7 +4,10 @@ on: types: [created] jobs: run-performance-benchmark-on-pull-request: - if: ${{ (github.event.issue.pull_request) && (contains(github.event.comment.body, '"run-benchmark-test"')) }} + if: | + github.repository == 'opensearch-project/OpenSearch' && + github.event.issue.pull_request && + contains(github.event.comment.body, '"run-benchmark-test"') runs-on: ubuntu-latest permissions: id-token: write @@ -111,7 +114,7 
@@ jobs: uses: actions/github-script@v7 with: github-token: ${{ secrets.GITHUB_TOKEN }} - result-encoding: string + result-encoding: json script: | // Get the collaborators - filtered to maintainer permissions const maintainersResponse = await github.request('GET /repos/{owner}/{repo}/collaborators', { @@ -121,12 +124,12 @@ jobs: affiliation: 'all', per_page: 100 }); - return maintainersResponse.data.map(item => item.login).join(', '); + return maintainersResponse.data.map(item => item.login); - uses: trstringer/manual-approval@v1 - if: (!contains(steps.get_approvers.outputs.result, github.event.comment.user.login)) + if: ${{ !contains(fromJSON(steps.get_approvers.outputs.result), github.event.comment.user.login) }} with: secret: ${{ github.TOKEN }} - approvers: ${{ steps.get_approvers.outputs.result }} + approvers: ${{ join(fromJSON(steps.get_approvers.outputs.result), ', ') }} minimum-approvals: 1 issue-title: 'Request to approve/deny benchmark run for PR #${{ env.PR_NUMBER }}' issue-body: "Please approve or deny the benchmark run for PR #${{ env.PR_NUMBER }}" From 1f94b34197347ef2027170aae455c62386b2a342 Mon Sep 17 00:00:00 2001 From: Rishabh Singh Date: Fri, 3 Jan 2025 13:02:37 -0800 Subject: [PATCH 17/27] Add benchmark confirm for lucene-10 big5 index snapshot (#16940) Signed-off-by: Rishabh Singh --- .github/benchmark-configs.json | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/benchmark-configs.json b/.github/benchmark-configs.json index 732f2f9b96ae3..b3590f8a2f942 100644 --- a/.github/benchmark-configs.json +++ b/.github/benchmark-configs.json @@ -239,5 +239,22 @@ "data_instance_config": "4vCPU, 32G Mem, 16G Heap" }, "baseline_cluster_config": "x64-r5.xlarge-1-shard-0-replica-snapshot-baseline" + }, + "id_15": { + "description": "Search only test-procedure for big5, uses lucene-10 index snapshot to restore the data for OS-3.0.0", + "supported_major_versions": ["3"], + "cluster-benchmark-configs": { + "SINGLE_NODE_CLUSTER": 
"true", + "MIN_DISTRIBUTION": "true", + "TEST_WORKLOAD": "big5", + "WORKLOAD_PARAMS": "{\"snapshot_repo_name\":\"benchmark-workloads-repo-3x\",\"snapshot_bucket_name\":\"benchmark-workload-snapshots\",\"snapshot_region\":\"us-east-1\",\"snapshot_base_path\":\"workload-snapshots-3x\",\"snapshot_name\":\"big5_1_shard_single_client\"}", + "CAPTURE_NODE_STAT": "true", + "TEST_PROCEDURE": "restore-from-snapshot" + }, + "cluster_configuration": { + "size": "Single-Node", + "data_instance_config": "4vCPU, 32G Mem, 16G Heap" + }, + "baseline_cluster_config": "x64-r5.xlarge-1-shard-0-replica-snapshot-baseline" } } From 703eddab489607ee25a7db8428c076d89826b7c6 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Fri, 3 Jan 2025 18:07:04 -0500 Subject: [PATCH 18/27] Remove duplicate DCO check (#16942) Signed-off-by: Andriy Redko --- .github/workflows/dco.yml | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 .github/workflows/dco.yml diff --git a/.github/workflows/dco.yml b/.github/workflows/dco.yml deleted file mode 100644 index ef842bb405d60..0000000000000 --- a/.github/workflows/dco.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Developer Certificate of Origin Check - -on: [pull_request] - -jobs: - dco-check: - runs-on: ubuntu-latest - - steps: - - name: Get PR Commits - id: 'get-pr-commits' - uses: tim-actions/get-pr-commits@v1.3.1 - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: DCO Check - uses: tim-actions/dco@v1.1.0 - with: - commits: ${{ steps.get-pr-commits.outputs.commits }} - From 845fbfa10407f3264e6aab8812eff4ef0ad8be24 Mon Sep 17 00:00:00 2001 From: Craig Perkins Date: Fri, 3 Jan 2025 18:11:49 -0500 Subject: [PATCH 19/27] Allow extended plugins to be optional (#16909) * Make extended plugins optional Signed-off-by: Craig Perkins * Make extended plugins optional Signed-off-by: Craig Perkins * Load extensions for classpath plugins Signed-off-by: Craig Perkins * Ensure only single instance for each classpath extension Signed-off-by: Craig 
Perkins * Add test for classpath plugin extended plugin loading Signed-off-by: Craig Perkins * Modify test to allow optional extended plugin Signed-off-by: Craig Perkins * Only optional extended plugins Signed-off-by: Craig Perkins * Add additional warning message Signed-off-by: Craig Perkins * Add to CHANGELOG Signed-off-by: Craig Perkins * Add tag to make extended plugin optional Signed-off-by: Craig Perkins * Only send plugin names when serializing PluginInfo Signed-off-by: Craig Perkins * Keep track of optional extended plugins in separate set Signed-off-by: Craig Perkins * Include in ser/de of PluginInfo Signed-off-by: Craig Perkins * Change to 3_0_0 Signed-off-by: Craig Perkins --------- Signed-off-by: Craig Perkins --- CHANGELOG.md | 1 + .../org/opensearch/plugins/PluginInfo.java | 33 +++++++++++++++++-- .../opensearch/plugins/PluginsService.java | 15 ++++++++- .../opensearch/plugins/PluginInfoTests.java | 27 +++++++++++++++ .../plugins/PluginsServiceTests.java | 29 +++++++++++++++- 5 files changed, 101 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 45bc56b505eb3..5f813fecf66cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -59,6 +59,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) - Make entries for dependencies from server/build.gradle to gradle version catalog ([#16707](https://github.com/opensearch-project/OpenSearch/pull/16707)) +- Allow extended plugins to be optional ([#16909](https://github.com/opensearch-project/OpenSearch/pull/16909)) ### Deprecated - Performing update operation with default pipeline or final pipeline is deprecated ([#16712](https://github.com/opensearch-project/OpenSearch/pull/16712)) diff --git a/server/src/main/java/org/opensearch/plugins/PluginInfo.java 
b/server/src/main/java/org/opensearch/plugins/PluginInfo.java index b6030f4ded5e5..7173a653ebc9a 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/opensearch/plugins/PluginInfo.java @@ -86,6 +86,8 @@ public class PluginInfo implements Writeable, ToXContentObject { private final String classname; private final String customFolderName; private final List extendedPlugins; + // Optional extended plugins are a subset of extendedPlugins that only contains the optional extended plugins + private final List optionalExtendedPlugins; private final boolean hasNativeController; /** @@ -149,7 +151,11 @@ public PluginInfo( this.javaVersion = javaVersion; this.classname = classname; this.customFolderName = customFolderName; - this.extendedPlugins = Collections.unmodifiableList(extendedPlugins); + this.extendedPlugins = extendedPlugins.stream().map(s -> s.split(";")[0]).collect(Collectors.toUnmodifiableList()); + this.optionalExtendedPlugins = extendedPlugins.stream() + .filter(PluginInfo::isOptionalExtension) + .map(s -> s.split(";")[0]) + .collect(Collectors.toUnmodifiableList()); this.hasNativeController = hasNativeController; } @@ -209,6 +215,16 @@ public PluginInfo(final StreamInput in) throws IOException { this.customFolderName = in.readString(); this.extendedPlugins = in.readStringList(); this.hasNativeController = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + this.optionalExtendedPlugins = in.readStringList(); + } else { + this.optionalExtendedPlugins = new ArrayList<>(); + } + } + + static boolean isOptionalExtension(String extendedPlugin) { + String[] dependency = extendedPlugin.split(";"); + return dependency.length > 1 && "optional=true".equals(dependency[1]); } @Override @@ -234,6 +250,9 @@ This works for currently supported range notations (=,~) } out.writeStringCollection(extendedPlugins); out.writeBoolean(hasNativeController); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + 
out.writeStringCollection(optionalExtendedPlugins); + } } /** @@ -417,8 +436,17 @@ public String getFolderName() { * * @return the names of the plugins extended */ + public boolean isExtendedPluginOptional(String extendedPlugin) { + return optionalExtendedPlugins.contains(extendedPlugin); + } + + /** + * Other plugins this plugin extends through SPI + * + * @return the names of the plugins extended + */ public List getExtendedPlugins() { - return extendedPlugins; + return extendedPlugins.stream().map(s -> s.split(";")[0]).collect(Collectors.toUnmodifiableList()); } /** @@ -493,6 +521,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("custom_foldername", customFolderName); builder.field("extended_plugins", extendedPlugins); builder.field("has_native_controller", hasNativeController); + builder.field("optional_extended_plugins", optionalExtendedPlugins); } builder.endObject(); diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java index f08c9c738f1b4..9bc1f1334122e 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginsService.java +++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java @@ -524,7 +524,13 @@ private static void addSortedBundle( for (String dependency : bundle.plugin.getExtendedPlugins()) { Bundle depBundle = bundles.get(dependency); if (depBundle == null) { - throw new IllegalArgumentException("Missing plugin [" + dependency + "], dependency of [" + name + "]"); + if (bundle.plugin.isExtendedPluginOptional(dependency)) { + logger.warn("Missing plugin [" + dependency + "], dependency of [" + name + "]"); + logger.warn("Some features of this plugin may not function without the dependencies being installed.\n"); + continue; + } else { + throw new IllegalArgumentException("Missing plugin [" + dependency + "], dependency of [" + name + "]"); + } } addSortedBundle(depBundle, bundles, sortedBundles, 
dependencyStack); assert sortedBundles.contains(depBundle); @@ -653,6 +659,9 @@ static void checkBundleJarHell(Set classpath, Bundle bundle, Map urls = new HashSet<>(); for (String extendedPlugin : exts) { Set pluginUrls = transitiveUrls.get(extendedPlugin); + if (pluginUrls == null && bundle.plugin.isExtendedPluginOptional(extendedPlugin)) { + continue; + } assert pluginUrls != null : "transitive urls should have already been set for " + extendedPlugin; Set intersection = new HashSet<>(urls); @@ -704,6 +713,10 @@ private Plugin loadBundle(Bundle bundle, Map loaded) { List extendedLoaders = new ArrayList<>(); for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { Plugin extendedPlugin = loaded.get(extendedPluginName); + if (extendedPlugin == null && bundle.plugin.isExtendedPluginOptional(extendedPluginName)) { + // extended plugin is optional and is not installed + continue; + } assert extendedPlugin != null; if (ExtensiblePlugin.class.isInstance(extendedPlugin) == false) { throw new IllegalStateException("Plugin [" + name + "] cannot extend non-extensible plugin [" + extendedPluginName + "]"); diff --git a/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java b/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java index 12c7dc870c104..76294d85c64d4 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java @@ -44,6 +44,7 @@ import org.opensearch.semver.SemverRange; import org.opensearch.test.OpenSearchTestCase; +import java.io.IOException; import java.nio.ByteBuffer; import java.nio.file.Path; import java.util.ArrayList; @@ -55,6 +56,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class PluginInfoTests extends OpenSearchTestCase { @@ -281,6 +283,30 @@ public void 
testReadFromPropertiesJvmMissingClassname() throws Exception { assertThat(e.getMessage(), containsString("property [classname] is missing")); } + public void testExtendedPluginsSingleOptionalExtension() throws IOException { + Path pluginDir = createTempDir().resolve("fake-plugin"); + PluginTestUtil.writePluginProperties( + pluginDir, + "description", + "fake desc", + "name", + "my_plugin", + "version", + "1.0", + "opensearch.version", + Version.CURRENT.toString(), + "java.version", + System.getProperty("java.specification.version"), + "classname", + "FakePlugin", + "extended.plugins", + "foo;optional=true" + ); + PluginInfo info = PluginInfo.readFromProperties(pluginDir); + assertThat(info.getExtendedPlugins(), contains("foo")); + assertThat(info.isExtendedPluginOptional("foo"), is(true)); + } + public void testExtendedPluginsSingleExtension() throws Exception { Path pluginDir = createTempDir().resolve("fake-plugin"); PluginTestUtil.writePluginProperties( @@ -302,6 +328,7 @@ public void testExtendedPluginsSingleExtension() throws Exception { ); PluginInfo info = PluginInfo.readFromProperties(pluginDir); assertThat(info.getExtendedPlugins(), contains("foo")); + assertThat(info.isExtendedPluginOptional("foo"), is(false)); } public void testExtendedPluginsMultipleExtensions() throws Exception { diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index bd9ee33856f14..f5702fa1a7ade 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -361,7 +361,7 @@ public void testSortBundlesNoDeps() throws Exception { assertThat(sortedBundles, Matchers.contains(bundle1, bundle2, bundle3)); } - public void testSortBundlesMissingDep() throws Exception { + public void testSortBundlesMissingRequiredDep() throws Exception { Path pluginDir = createTempDir(); PluginInfo info = new 
PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", "MyPlugin", Collections.singletonList("dne"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); @@ -372,6 +372,33 @@ public void testSortBundlesMissingDep() throws Exception { assertEquals("Missing plugin [dne], dependency of [foo]", e.getMessage()); } + public void testSortBundlesMissingOptionalDep() throws Exception { + try (MockLogAppender mockLogAppender = MockLogAppender.createForLoggers(LogManager.getLogger(PluginsService.class))) { + mockLogAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "[.test] warning", + "org.opensearch.plugins.PluginsService", + Level.WARN, + "Missing plugin [dne], dependency of [foo]" + ) + ); + Path pluginDir = createTempDir(); + PluginInfo info = new PluginInfo( + "foo", + "desc", + "1.0", + Version.CURRENT, + "1.8", + "MyPlugin", + Collections.singletonList("dne;optional=true"), + false + ); + PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); + PluginsService.sortBundles(Collections.singleton(bundle)); + mockLogAppender.assertAllExpectationsMatched(); + } + } + public void testSortBundlesCommonDep() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order From c0f7806753c74776465bb483f0201bc5897c15a2 Mon Sep 17 00:00:00 2001 From: Craig Perkins Date: Fri, 3 Jan 2025 21:48:15 -0500 Subject: [PATCH 20/27] Change version in PluginInfo to V_2_19_0 after backport to 2.x merged (#16947) Signed-off-by: Craig Perkins --- server/src/main/java/org/opensearch/plugins/PluginInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/plugins/PluginInfo.java b/server/src/main/java/org/opensearch/plugins/PluginInfo.java index 7173a653ebc9a..4ff699e8017ba 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/opensearch/plugins/PluginInfo.java @@ 
-215,7 +215,7 @@ public PluginInfo(final StreamInput in) throws IOException { this.customFolderName = in.readString(); this.extendedPlugins = in.readStringList(); this.hasNativeController = in.readBoolean(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_19_0)) { this.optionalExtendedPlugins = in.readStringList(); } else { this.optionalExtendedPlugins = new ArrayList<>(); @@ -250,7 +250,7 @@ This works for currently supported range notations (=,~) } out.writeStringCollection(extendedPlugins); out.writeBoolean(hasNativeController); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_19_0)) { out.writeStringCollection(optionalExtendedPlugins); } } From d7641ca8788441e384fbde6c58b6f2530ec8772d Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Mon, 6 Jan 2025 14:06:16 +0530 Subject: [PATCH 21/27] Support object fields in star-tree index (#16728) --------- Signed-off-by: bharath-techie --- CHANGELOG.md | 2 + .../index/mapper/StarTreeMapperIT.java | 371 +++++++++++++++++- .../index/mapper/DocumentParser.java | 14 +- .../index/mapper/MapperService.java | 25 ++ .../index/mapper/StarTreeMapper.java | 43 +- .../index/mapper/StarTreeMapperTests.java | 79 +++- 6 files changed, 528 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f813fecf66cf..0efb53beb6e31 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Introduce a setting to disable download of full cluster state from remote on term mismatch([#16798](https://github.com/opensearch-project/OpenSearch/pull/16798/)) - Added ability to retrieve value from DocValues in a flat_object filed([#16802](https://github.com/opensearch-project/OpenSearch/pull/16802)) - Introduce framework for auxiliary transports and an experimental gRPC transport plugin 
([#16534](https://github.com/opensearch-project/OpenSearch/pull/16534)) +- Changes to support IP field in star tree indexing([#16641](https://github.com/opensearch-project/OpenSearch/pull/16641/)) +- Support object fields in star-tree index([#16728](https://github.com/opensearch-project/OpenSearch/pull/16728/)) ### Dependencies - Bump `com.google.cloud:google-cloud-core-http` from 2.23.0 to 2.47.0 ([#16504](https://github.com/opensearch-project/OpenSearch/pull/16504)) diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java index 3f9053576329c..1d01f717aad1f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java @@ -26,6 +26,8 @@ import org.opensearch.index.compositeindex.datacube.DataCubeDateTimeUnit; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.MetricStat; +import org.opensearch.index.compositeindex.datacube.NumericDimension; +import org.opensearch.index.compositeindex.datacube.OrdinalDimension; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings; import org.opensearch.index.compositeindex.datacube.startree.utils.date.DateTimeUnitAdapter; @@ -41,6 +43,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -121,6 +124,187 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool } } + private static XContentBuilder createNestedTestMapping() { + try { + return jsonBuilder().startObject() + .startObject("composite") + 
.startObject("startree-1") + .field("type", "star_tree") + .startObject("config") + .startObject("date_dimension") + .field("name", "timestamp") + .endObject() + .startArray("ordered_dimensions") + .startObject() + .field("name", "nested.nested1.status") + .endObject() + .startObject() + .field("name", "nested.nested1.keyword_dv") + .endObject() + .endArray() + .startArray("metrics") + .startObject() + .field("name", "nested3.numeric_dv") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .startObject("properties") + .startObject("timestamp") + .field("type", "date") + .endObject() + .startObject("nested3") + .startObject("properties") + .startObject("numeric_dv") + .field("type", "integer") + .field("doc_values", true) + .endObject() + .endObject() + .endObject() + .startObject("numeric") + .field("type", "integer") + .field("doc_values", false) + .endObject() + .startObject("nested") + .startObject("properties") + .startObject("nested1") + .startObject("properties") + .startObject("status") + .field("type", "integer") + .field("doc_values", true) + .endObject() + .startObject("keyword_dv") + .field("type", "keyword") + .field("doc_values", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .startObject("nested-not-startree") + .startObject("properties") + .startObject("nested1") + .startObject("properties") + .startObject("status") + .field("type", "integer") + .field("doc_values", true) + .endObject() + .startObject("keyword_dv") + .field("type", "keyword") + .field("doc_values", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .startObject("keyword") + .field("type", "keyword") + .field("doc_values", false) + .endObject() + .startObject("ip") + .field("type", "ip") + .field("doc_values", false) + .endObject() + .endObject() + .endObject(); + } catch (IOException e) { + throw new IllegalStateException(e); + } + } + + private static XContentBuilder 
createNestedTestMappingForArray() { + try { + return jsonBuilder().startObject() + .startObject("composite") + .startObject("startree-1") + .field("type", "star_tree") + .startObject("config") + .startObject("date_dimension") + .field("name", "timestamp") + .endObject() + .startArray("ordered_dimensions") + .startObject() + .field("name", "status") + .endObject() + .startObject() + .field("name", "nested.nested1.keyword_dv") + .endObject() + .endArray() + .startArray("metrics") + .startObject() + .field("name", "nested3.numeric_dv") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .startObject("properties") + .startObject("timestamp") + .field("type", "date") + .endObject() + .startObject("status") + .field("type", "integer") + .endObject() + .startObject("nested3") + .startObject("properties") + .startObject("numeric_dv") + .field("type", "integer") + .field("doc_values", true) + .endObject() + .endObject() + .endObject() + .startObject("numeric") + .field("type", "integer") + .field("doc_values", false) + .endObject() + .startObject("nested") + .startObject("properties") + .startObject("nested1") + .startObject("properties") + .startObject("status") + .field("type", "integer") + .field("doc_values", true) + .endObject() + .startObject("keyword_dv") + .field("type", "keyword") + .field("doc_values", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .startObject("nested-not-startree") + .startObject("properties") + .startObject("nested1") + .startObject("properties") + .startObject("status") + .field("type", "integer") + .field("doc_values", true) + .endObject() + .startObject("keyword_dv") + .field("type", "keyword") + .field("doc_values", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .startObject("keyword") + .field("type", "keyword") + .field("doc_values", false) + .endObject() + .startObject("ip") + .field("type", "ip") + .field("doc_values", false) + .endObject() + 
.endObject() + .endObject(); + } catch (IOException e) { + throw new IllegalStateException(e); + } + } + private static XContentBuilder createDateTestMapping(boolean duplicate) { try { return jsonBuilder().startObject() @@ -475,6 +659,46 @@ public void testValidCompositeIndexWithDates() { } } + public void testValidCompositeIndexWithNestedFields() { + prepareCreate(TEST_INDEX).setMapping(createNestedTestMapping()).setSettings(settings).get(); + Iterable dataNodeInstances = internalCluster().getDataNodeInstances(IndicesService.class); + for (IndicesService service : dataNodeInstances) { + final Index index = resolveIndex("test"); + if (service.hasIndex(index)) { + IndexService indexService = service.indexService(index); + Set fts = indexService.mapperService().getCompositeFieldTypes(); + + for (CompositeMappedFieldType ft : fts) { + assertTrue(ft instanceof StarTreeMapper.StarTreeFieldType); + StarTreeMapper.StarTreeFieldType starTreeFieldType = (StarTreeMapper.StarTreeFieldType) ft; + assertEquals("timestamp", starTreeFieldType.getDimensions().get(0).getField()); + assertTrue(starTreeFieldType.getDimensions().get(0) instanceof DateDimension); + DateDimension dateDim = (DateDimension) starTreeFieldType.getDimensions().get(0); + List expectedTimeUnits = Arrays.asList( + new DateTimeUnitAdapter(Rounding.DateTimeUnit.MINUTES_OF_HOUR), + DataCubeDateTimeUnit.HALF_HOUR_OF_DAY + ); + for (int i = 0; i < dateDim.getIntervals().size(); i++) { + assertEquals(expectedTimeUnits.get(i).shortName(), dateDim.getSortedCalendarIntervals().get(i).shortName()); + } + assertEquals("nested.nested1.status", starTreeFieldType.getDimensions().get(1).getField()); + assertTrue(starTreeFieldType.getDimensions().get(1) instanceof NumericDimension); + assertEquals("nested.nested1.keyword_dv", starTreeFieldType.getDimensions().get(2).getField()); + assertTrue(starTreeFieldType.getDimensions().get(2) instanceof OrdinalDimension); + assertEquals("nested3.numeric_dv", 
starTreeFieldType.getMetrics().get(0).getField()); + List expectedMetrics = Arrays.asList(MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.AVG); + assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); + assertEquals(10000, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); + assertEquals( + StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP, + starTreeFieldType.getStarTreeConfig().getBuildMode() + ); + assertEquals(Collections.emptySet(), starTreeFieldType.getStarTreeConfig().getSkipStarNodeCreationInDims()); + } + } + } + } + public void testValidCompositeIndexWithDuplicateDates() { prepareCreate(TEST_INDEX).setMapping(createDateTestMapping(true)).setSettings(settings).get(); Iterable dataNodeInstances = internalCluster().getDataNodeInstances(IndicesService.class); @@ -563,11 +787,156 @@ public void testCompositeIndexWithArraysInCompositeField() throws IOException { () -> client().prepareIndex(TEST_INDEX).setSource(doc).get() ); assertEquals( - "object mapping for [_doc] with array for [numeric_dv] cannot be accepted as field is also part of composite index mapping which does not accept arrays", + "object mapping for [_doc] with array for [numeric_dv] cannot be accepted, as the field is also part of composite index mapping which does not accept arrays", ex.getMessage() ); } + public void testCompositeIndexWithArraysInNestedCompositeField() throws IOException { + // here nested.nested1.status is part of the composite field but "nested" field itself is an array + prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createNestedTestMapping()).get(); + // Attempt to index a document with an array field + XContentBuilder doc = jsonBuilder().startObject() + .field("timestamp", "2023-06-01T12:00:00Z") + .startArray("nested") + .startObject() + .startArray("nested1") + .startObject() + .field("status", 10) + .endObject() + .startObject() + .field("status", 10) + .endObject() + .startObject() + .field("status", 10) + .endObject() 
+ .endArray() + .endObject() + .endArray() + .endObject(); + // Index the document and refresh + MapperParsingException ex = expectThrows( + MapperParsingException.class, + () -> client().prepareIndex(TEST_INDEX).setSource(doc).get() + ); + assertEquals( + "object mapping for [_doc] with array for [nested] cannot be accepted, as the field is also part of composite index mapping which does not accept arrays", + ex.getMessage() + ); + } + + public void testCompositeIndexWithArraysInChildNestedCompositeField() throws IOException { + prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createNestedTestMapping()).get(); + // here nested.nested1.status is part of the composite field but "nested.nested1" field is an array + XContentBuilder doc = jsonBuilder().startObject() + .field("timestamp", "2023-06-01T12:00:00Z") + .startObject("nested") + .startArray("nested1") + .startObject() + .field("status", 10) + .endObject() + .startObject() + .field("status", 10) + .endObject() + .startObject() + .field("status", 10) + .endObject() + .endArray() + .endObject() + .endObject(); + // Index the document and refresh + MapperParsingException ex = expectThrows( + MapperParsingException.class, + () -> client().prepareIndex(TEST_INDEX).setSource(doc).get() + ); + assertEquals( + "object mapping for [nested] with array for [nested1] cannot be accepted, as the field is also part of composite index mapping which does not accept arrays", + ex.getMessage() + ); + } + + public void testCompositeIndexWithArraysInNestedCompositeFieldSameNameAsNormalField() throws IOException { + prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createNestedTestMappingForArray()).get(); + // here status is part of the composite field but "nested.nested1.status" field is an array which is not + // part of composite field + XContentBuilder doc = jsonBuilder().startObject() + .field("timestamp", "2023-06-01T12:00:00Z") + .startObject("nested") + .startObject("nested1") + .startArray("status") + 
.value(10) + .value(20) + .value(30) + .endArray() + .endObject() + .endObject() + .field("status", "200") + .endObject(); + // Index the document and refresh + // Index the document and refresh + IndexResponse indexResponse = client().prepareIndex(TEST_INDEX).setSource(doc).get(); + + assertEquals(RestStatus.CREATED, indexResponse.status()); + + client().admin().indices().prepareRefresh(TEST_INDEX).get(); + // Verify the document was indexed + SearchResponse searchResponse = client().prepareSearch(TEST_INDEX).setQuery(QueryBuilders.matchAllQuery()).get(); + + assertEquals(1, searchResponse.getHits().getTotalHits().value); + + // Verify the values in the indexed document + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals("2023-06-01T12:00:00Z", hit.getSourceAsMap().get("timestamp")); + + int values = Integer.parseInt((String) hit.getSourceAsMap().get("status")); + assertEquals(200, values); + } + + public void testCompositeIndexWithNestedArraysInNonCompositeField() throws IOException { + prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createNestedTestMapping()).get(); + // Attempt to index a document with an array field + XContentBuilder doc = jsonBuilder().startObject() + .field("timestamp", "2023-06-01T12:00:00Z") + .startObject("nested-not-startree") + .startArray("nested1") + .startObject() + .field("status", 10) + .endObject() + .startObject() + .field("status", 20) + .endObject() + .startObject() + .field("status", 30) + .endObject() + .endArray() + .endObject() + .endObject(); + + // Index the document and refresh + IndexResponse indexResponse = client().prepareIndex(TEST_INDEX).setSource(doc).get(); + + assertEquals(RestStatus.CREATED, indexResponse.status()); + + client().admin().indices().prepareRefresh(TEST_INDEX).get(); + // Verify the document was indexed + SearchResponse searchResponse = client().prepareSearch(TEST_INDEX).setQuery(QueryBuilders.matchAllQuery()).get(); + + assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + + // Verify the values in the indexed document + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals("2023-06-01T12:00:00Z", hit.getSourceAsMap().get("timestamp")); + + List values = (List) ((Map) (hit.getSourceAsMap().get("nested-not-startree"))).get("nested1"); + assertEquals(3, values.size()); + int i = 1; + for (Object val : values) { + Map valMap = (Map) val; + assertEquals(10 * i, valMap.get("status")); + i++; + } + } + public void testCompositeIndexWithArraysInNonCompositeField() throws IOException { prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createMinimalTestMapping(false, false, false)).get(); // Attempt to index a document with an array field diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java index 50ff816695156..134baa70f80c2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java @@ -661,12 +661,22 @@ private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapp throws IOException { XContentParser parser = context.parser(); XContentParser.Token token; + String path = context.path().pathAsText(arrayFieldName); + boolean isNested = path.contains(".") || context.mapperService().isCompositeIndexFieldNestedField(path); // block array values for composite index fields - if (context.indexSettings().isCompositeIndex() && context.mapperService().isFieldPartOfCompositeIndex(arrayFieldName)) { + // Assume original index has 2 fields - status , nested.nested1.status + // case 1 : if status is part of composite index and nested.nested1.status is not part of composite index, + // then nested.nested1.status/nested.nested1/nested array should not be blocked + // case 2 : if nested.nested1.status is part of composite index and status is not part of composite index, + // 
then arrays in nested/nested.nested1 and nested.nested1.status fields should be blocked + // but arrays in status should not be blocked + if (context.indexSettings().isCompositeIndex() + && ((isNested == false && context.mapperService().isFieldPartOfCompositeIndex(arrayFieldName)) + || (isNested && context.mapperService().isCompositeIndexFieldNestedField(path)))) { throw new MapperParsingException( String.format( Locale.ROOT, - "object mapping for [%s] with array for [%s] cannot be accepted as field is also part of composite index mapping which does not accept arrays", + "object mapping for [%s] with array for [%s] cannot be accepted, as the field is also part of composite index mapping which does not accept arrays", mapper.name(), arrayFieldName ) diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index 84b0b1d69432d..5a7c6a0102052 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -228,6 +228,7 @@ public enum MergeReason { private volatile Set compositeMappedFieldTypes; private volatile Set fieldsPartOfCompositeMappings; + private volatile Set nestedFieldsPartOfCompositeMappings; public MapperService( IndexSettings indexSettings, @@ -554,10 +555,29 @@ private synchronized Map internalMerge(DocumentMapper ma private void buildCompositeFieldLookup() { Set fieldsPartOfCompositeMappings = new HashSet<>(); + Set nestedFieldsPartOfCompositeMappings = new HashSet<>(); + for (CompositeMappedFieldType fieldType : compositeMappedFieldTypes) { fieldsPartOfCompositeMappings.addAll(fieldType.fields()); + + for (String field : fieldType.fields()) { + String[] parts = field.split("\\."); + if (parts.length > 1) { + StringBuilder path = new StringBuilder(); + for (int i = 0; i < parts.length; i++) { + if (i == 0) { + path.append(parts[i]); + } else { + 
path.append(".").append(parts[i]); + } + nestedFieldsPartOfCompositeMappings.add(path.toString()); + } + } + } } + this.fieldsPartOfCompositeMappings = fieldsPartOfCompositeMappings; + this.nestedFieldsPartOfCompositeMappings = nestedFieldsPartOfCompositeMappings; } private boolean assertSerialization(DocumentMapper mapper) { @@ -690,6 +710,11 @@ public boolean isFieldPartOfCompositeIndex(String field) { return fieldsPartOfCompositeMappings.contains(field); } + public boolean isCompositeIndexFieldNestedField(String field) { + return nestedFieldsPartOfCompositeMappings.contains(field); + + } + public ObjectMapper getObjectMapper(String name) { return this.mapper == null ? null : this.mapper.objectMappers().get(name); } diff --git a/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java b/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java index 40f05a8b76755..7b361e12330a3 100644 --- a/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java @@ -23,6 +23,7 @@ import org.opensearch.search.lookup.SearchLookup; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.LinkedList; @@ -431,8 +432,46 @@ private static boolean isBuilderAllowedForMetric(Mapper.Builder builder) { return builder.isDataCubeMetricSupported(); } - private Optional findMapperBuilderByName(String field, List mappersBuilders) { - return mappersBuilders.stream().filter(builder -> builder.name().equals(field)).findFirst(); + private Optional findMapperBuilderByName(String name, List mappersBuilders) { + String[] parts = name.split("\\."); + + // Start with the top-level builders + Optional currentBuilder = mappersBuilders.stream() + .filter(builder -> builder.name().equals(parts[0])) + .findFirst(); + + // If we can't find the first part, or if there's only one part, return the result + if 
(currentBuilder.isEmpty() || parts.length == 1) { + return currentBuilder; + } + + // Navigate through the nested structure + try { + Mapper.Builder builder = currentBuilder.get(); + for (int i = 1; i < parts.length; i++) { + List childBuilders = getChildBuilders(builder); + int finalI = i; + builder = childBuilders.stream() + .filter(b -> b.name().equals(parts[finalI])) + .findFirst() + .orElseThrow( + () -> new IllegalArgumentException( + String.format(Locale.ROOT, "Could not find nested field [%s] in path [%s]", parts[finalI], name) + ) + ); + } + return Optional.of(builder); + } catch (Exception e) { + return Optional.empty(); + } + } + + // Helper method to get child builders from a parent builder + private List getChildBuilders(Mapper.Builder builder) { + if (builder instanceof ObjectMapper.Builder) { + return ((ObjectMapper.Builder) builder).mappersBuilders; + } + return Collections.emptyList(); } public Builder(String name, ObjectMapper.Builder objBuilder) { diff --git a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java index 333cdbcab05c5..684704ad65b0a 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java @@ -111,7 +111,7 @@ public void testCompositeIndexWithArraysInCompositeField() throws IOException { () -> mapper.parse(source(b -> b.startArray("status").value(0).value(1).endArray())) ); assertEquals( - "object mapping for [_doc] with array for [status] cannot be accepted as field is also part of composite index mapping which does not accept arrays", + "object mapping for [_doc] with array for [status] cannot be accepted, as the field is also part of composite index mapping which does not accept arrays", ex.getMessage() ); ParsedDocument doc = mapper.parse(source(b -> b.startArray("size").value(0).value(1).endArray())); @@ -284,6 +284,33 @@ 
public void testValidStarTreeDateDims() throws IOException { } } + public void testValidStarTreeNestedFields() throws IOException { + MapperService mapperService = createMapperService(getMinMappingWithNestedField()); + Set compositeFieldTypes = mapperService.getCompositeFieldTypes(); + for (CompositeMappedFieldType type : compositeFieldTypes) { + StarTreeMapper.StarTreeFieldType starTreeFieldType = (StarTreeMapper.StarTreeFieldType) type; + assertEquals("@timestamp", starTreeFieldType.getDimensions().get(0).getField()); + assertTrue(starTreeFieldType.getDimensions().get(0) instanceof DateDimension); + DateDimension dateDim = (DateDimension) starTreeFieldType.getDimensions().get(0); + List expectedDimensionFields = Arrays.asList("@timestamp_minute", "@timestamp_half-hour"); + assertEquals(expectedDimensionFields, dateDim.getSubDimensionNames()); + List expectedTimeUnits = Arrays.asList( + new DateTimeUnitAdapter(Rounding.DateTimeUnit.MINUTES_OF_HOUR), + DataCubeDateTimeUnit.HALF_HOUR_OF_DAY + ); + for (int i = 0; i < expectedTimeUnits.size(); i++) { + assertEquals(expectedTimeUnits.get(i).shortName(), dateDim.getSortedCalendarIntervals().get(i).shortName()); + } + assertEquals("nested.status", starTreeFieldType.getDimensions().get(1).getField()); + assertEquals("nested.status", starTreeFieldType.getMetrics().get(0).getField()); + List expectedMetrics = Arrays.asList(MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.AVG); + assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); + assertEquals(10000, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); + assertEquals(StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP, starTreeFieldType.getStarTreeConfig().getBuildMode()); + assertEquals(Collections.emptySet(), starTreeFieldType.getStarTreeConfig().getSkipStarNodeCreationInDims()); + } + } + public void testInValidStarTreeMinDims() throws IOException { MapperParsingException ex = expectThrows( MapperParsingException.class, @@ -1047,6 
+1074,56 @@ private XContentBuilder getMinMappingWith2StarTrees() throws IOException { }); } + private XContentBuilder getMinMappingWithNestedField() throws IOException { + return topMapping(b -> { + b.startObject("composite"); + b.startObject("startree"); + b.field("type", "star_tree"); + b.startObject("config"); + + b.startArray("ordered_dimensions"); + b.startObject(); + b.field("name", "@timestamp"); + b.endObject(); + b.startObject(); + b.field("name", "nested.status"); + b.endObject(); + b.endArray(); + + b.startArray("metrics"); + b.startObject(); + b.field("name", "nested.status"); + b.endObject(); + b.startObject(); + b.field("name", "metric_field"); + b.endObject(); + b.endArray(); + + b.endObject(); + b.endObject(); + + b.endObject(); + b.startObject("properties"); + b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + b.startObject("nested"); + b.startObject("properties"); + b.startObject("status"); + b.field("type", "integer"); + b.endObject(); + b.endObject(); + b.endObject(); + b.startObject("metric_field"); + b.field("type", "integer"); + b.endObject(); + b.startObject("keyword1"); + b.field("type", "keyword"); + b.endObject(); + b.endObject(); + }); + } + private XContentBuilder getInvalidMapping( boolean singleDim, boolean invalidSkipDims, From 4a53ff24adbec1d5aeb3d73548171870a3de925d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 09:57:50 -0500 Subject: [PATCH 22/27] Bump ch.qos.logback:logback-core from 1.5.12 to 1.5.16 in /test/fixtures/hdfs-fixture (#16951) * Bump ch.qos.logback:logback-core in /test/fixtures/hdfs-fixture Bumps [ch.qos.logback:logback-core](https://github.com/qos-ch/logback) from 1.5.12 to 1.5.16. - [Commits](https://github.com/qos-ch/logback/compare/v_1.5.12...v_1.5.16) --- updated-dependencies: - dependency-name: ch.qos.logback:logback-core dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + test/fixtures/hdfs-fixture/build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0efb53beb6e31..82bf9dd0fea0a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,6 +57,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.netflix.nebula.ospackage-base` from 11.10.0 to 11.10.1 ([#16896](https://github.com/opensearch-project/OpenSearch/pull/16896)) - Bump `com.microsoft.azure:msal4j` from 1.17.2 to 1.18.0 ([#16918](https://github.com/opensearch-project/OpenSearch/pull/16918)) - Bump `org.apache.commons:commons-text` from 1.12.0 to 1.13.0 ([#16919](https://github.com/opensearch-project/OpenSearch/pull/16919)) +- Bump `ch.qos.logback:logback-core` from 1.5.12 to 1.5.16 ([#16951](https://github.com/opensearch-project/OpenSearch/pull/16951)) ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 02aab575bbaf0..bb2b7ebafdf81 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -74,7 +74,7 @@ dependencies { api 'org.apache.zookeeper:zookeeper:3.9.3' api "org.apache.commons:commons-text:1.13.0" api "commons-net:commons-net:3.11.1" - api "ch.qos.logback:logback-core:1.5.12" + api "ch.qos.logback:logback-core:1.5.16" api "ch.qos.logback:logback-classic:1.5.15" api "org.jboss.xnio:xnio-nio:3.8.16.Final" api 'org.jline:jline:3.28.0' From e73ffdf1d1a11587f2d25ba69ddb46fc25994919 Mon Sep 17 00:00:00 2001 From: Ruirui Zhang Date: Mon, 6 Jan 2025 10:36:36 -0800 Subject: [PATCH 
23/27] [Workload Management] Add Workload Management IT (#16359) * add workload management IT Signed-off-by: Ruirui Zhang * address comments Signed-off-by: Ruirui Zhang --------- Signed-off-by: Ruirui Zhang --- CHANGELOG.md | 1 + .../backpressure/SearchBackpressureIT.java | 10 +- .../opensearch/wlm/WorkloadManagementIT.java | 434 ++++++++++++++++++ 3 files changed, 442 insertions(+), 3 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/opensearch/wlm/WorkloadManagementIT.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 82bf9dd0fea0a..99bfecfc0eac6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Support for keyword fields in star-tree index ([#16233](https://github.com/opensearch-project/OpenSearch/pull/16233)) - Add a flag in QueryShardContext to differentiate inner hit query ([#16600](https://github.com/opensearch-project/OpenSearch/pull/16600)) - Add vertical scaling and SoftReference for snapshot repository data cache ([#16489](https://github.com/opensearch-project/OpenSearch/pull/16489)) +- [Workload Management] Add Workload Management IT ([#16359](https://github.com/opensearch-project/OpenSearch/pull/16359)) - Support prefix list for remote repository attributes([#16271](https://github.com/opensearch-project/OpenSearch/pull/16271)) - Add new configuration setting `synonym_analyzer`, to the `synonym` and `synonym_graph` filters, enabling the specification of a custom analyzer for reading the synonym file ([#16488](https://github.com/opensearch-project/OpenSearch/pull/16488)). 
- Add stats for remote publication failure and move download failure stats to remote methods([#16682](https://github.com/opensearch-project/OpenSearch/pull/16682/)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java index 40c9301ef4bce..d200b9177353a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java @@ -314,7 +314,7 @@ public void testSearchCancellationWithBackpressureDisabled() throws InterruptedE assertNull("SearchShardTask shouldn't have cancelled for monitor_only mode", caughtException); } - private static class ExceptionCatchingListener implements ActionListener { + public static class ExceptionCatchingListener implements ActionListener { private final CountDownLatch latch; private Exception exception = null; @@ -333,7 +333,11 @@ public void onFailure(Exception e) { latch.countDown(); } - private Exception getException() { + public CountDownLatch getLatch() { + return latch; + } + + public Exception getException() { return exception; } } @@ -349,7 +353,7 @@ private Supplier descriptionSupplier(String description) { return () -> description; } - interface TaskFactory { + public interface TaskFactory { T createTask(long id, String type, String action, String description, TaskId parentTaskId, Map headers); } diff --git a/server/src/internalClusterTest/java/org/opensearch/wlm/WorkloadManagementIT.java b/server/src/internalClusterTest/java/org/opensearch/wlm/WorkloadManagementIT.java new file mode 100644 index 0000000000000..6b68a83da94e2 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/wlm/WorkloadManagementIT.java @@ -0,0 +1,434 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require 
contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.wlm; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.ActionType; +import org.opensearch.action.search.SearchTask; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest; +import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeAction; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.ClusterStateUpdateTask; +import org.opensearch.cluster.block.ClusterBlockException; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.metadata.QueryGroup; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.tasks.TaskCancelledException; +import org.opensearch.core.tasks.TaskId; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.plugins.Plugin; +import org.opensearch.search.backpressure.SearchBackpressureIT.ExceptionCatchingListener; +import org.opensearch.search.backpressure.SearchBackpressureIT.TaskFactory; +import org.opensearch.search.backpressure.SearchBackpressureIT.TestResponse; +import org.opensearch.tasks.CancellableTask; +import 
org.opensearch.tasks.Task; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; +import org.hamcrest.MatcherAssert; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +import static org.opensearch.threadpool.ThreadPool.Names.SAME; +import static org.opensearch.wlm.QueryGroupTask.QUERY_GROUP_ID_HEADER; +import static org.hamcrest.Matchers.instanceOf; + +public class WorkloadManagementIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + final static String PUT = "PUT"; + final static String MEMORY = "MEMORY"; + final static String CPU = "CPU"; + final static String ENABLED = "enabled"; + final static String DELETE = "DELETE"; + private static final TimeValue TIMEOUT = new TimeValue(1, TimeUnit.SECONDS); + + public WorkloadManagementIT(Settings nodeSettings) { + super(nodeSettings); + } + + @ParametersFactory + public static Collection parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Collection> nodePlugins() { + final List> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(TestClusterUpdatePlugin.class); + return plugins; + } + + @Before + public final void setupNodeSettings() { + Settings request = Settings.builder() + .put(WorkloadManagementSettings.NODE_LEVEL_MEMORY_REJECTION_THRESHOLD.getKey(), 0.8) + 
.put(WorkloadManagementSettings.NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD.getKey(), 0.9) + .put(WorkloadManagementSettings.NODE_LEVEL_CPU_REJECTION_THRESHOLD.getKey(), 0.8) + .put(WorkloadManagementSettings.NODE_LEVEL_CPU_CANCELLATION_THRESHOLD.getKey(), 0.9) + .build(); + assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(request).get()); + } + + @After + public final void cleanupNodeSettings() { + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull("*")) + .setTransientSettings(Settings.builder().putNull("*")) + ); + } + + public void testHighCPUInEnforcedMode() throws InterruptedException { + Settings request = Settings.builder().put(WorkloadManagementSettings.WLM_MODE_SETTING.getKey(), ENABLED).build(); + assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(request).get()); + QueryGroup queryGroup = new QueryGroup( + "name", + new MutableQueryGroupFragment( + MutableQueryGroupFragment.ResiliencyMode.ENFORCED, + Map.of(ResourceType.CPU, 0.01, ResourceType.MEMORY, 0.01) + ) + ); + updateQueryGroupInClusterState(PUT, queryGroup); + Exception caughtException = executeQueryGroupTask(CPU, queryGroup.get_id()); + assertNotNull("SearchTask should have been cancelled with TaskCancelledException", caughtException); + MatcherAssert.assertThat(caughtException, instanceOf(TaskCancelledException.class)); + updateQueryGroupInClusterState(DELETE, queryGroup); + } + + public void testHighCPUInMonitorMode() throws InterruptedException { + QueryGroup queryGroup = new QueryGroup( + "name", + new MutableQueryGroupFragment( + MutableQueryGroupFragment.ResiliencyMode.ENFORCED, + Map.of(ResourceType.CPU, 0.01, ResourceType.MEMORY, 0.01) + ) + ); + updateQueryGroupInClusterState(PUT, queryGroup); + Exception caughtException = executeQueryGroupTask(CPU, queryGroup.get_id()); + assertNull(caughtException); + updateQueryGroupInClusterState(DELETE, 
queryGroup); + } + + public void testHighMemoryInEnforcedMode() throws InterruptedException { + Settings request = Settings.builder().put(WorkloadManagementSettings.WLM_MODE_SETTING.getKey(), ENABLED).build(); + assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(request).get()); + QueryGroup queryGroup = new QueryGroup( + "name", + new MutableQueryGroupFragment(MutableQueryGroupFragment.ResiliencyMode.ENFORCED, Map.of(ResourceType.MEMORY, 0.01)) + ); + updateQueryGroupInClusterState(PUT, queryGroup); + Exception caughtException = executeQueryGroupTask(MEMORY, queryGroup.get_id()); + assertNotNull("SearchTask should have been cancelled with TaskCancelledException", caughtException); + MatcherAssert.assertThat(caughtException, instanceOf(TaskCancelledException.class)); + updateQueryGroupInClusterState(DELETE, queryGroup); + } + + public void testHighMemoryInMonitorMode() throws InterruptedException { + QueryGroup queryGroup = new QueryGroup( + "name", + new MutableQueryGroupFragment(MutableQueryGroupFragment.ResiliencyMode.ENFORCED, Map.of(ResourceType.MEMORY, 0.01)) + ); + updateQueryGroupInClusterState(PUT, queryGroup); + Exception caughtException = executeQueryGroupTask(MEMORY, queryGroup.get_id()); + assertNull("SearchTask should have been cancelled with TaskCancelledException", caughtException); + updateQueryGroupInClusterState(DELETE, queryGroup); + } + + public void testNoCancellation() throws InterruptedException { + QueryGroup queryGroup = new QueryGroup( + "name", + new MutableQueryGroupFragment( + MutableQueryGroupFragment.ResiliencyMode.ENFORCED, + Map.of(ResourceType.CPU, 0.8, ResourceType.MEMORY, 0.8) + ) + ); + updateQueryGroupInClusterState(PUT, queryGroup); + Exception caughtException = executeQueryGroupTask(CPU, queryGroup.get_id()); + assertNull(caughtException); + updateQueryGroupInClusterState(DELETE, queryGroup); + } + + public Exception executeQueryGroupTask(String resourceType, String queryGroupId) throws 
InterruptedException { + ExceptionCatchingListener listener = new ExceptionCatchingListener(); + client().execute( + TestQueryGroupTaskTransportAction.ACTION, + new TestQueryGroupTaskRequest( + resourceType, + queryGroupId, + (TaskFactory) (id, type, action, description, parentTaskId, headers) -> new SearchTask( + id, + type, + action, + () -> description, + parentTaskId, + headers + ) + ), + listener + ); + assertTrue(listener.getLatch().await(TIMEOUT.getSeconds() + 1, TimeUnit.SECONDS)); + return listener.getException(); + } + + public void updateQueryGroupInClusterState(String method, QueryGroup queryGroup) throws InterruptedException { + ExceptionCatchingListener listener = new ExceptionCatchingListener(); + client().execute(TestClusterUpdateTransportAction.ACTION, new TestClusterUpdateRequest(queryGroup, method), listener); + assertTrue(listener.getLatch().await(TIMEOUT.getSeconds(), TimeUnit.SECONDS)); + assertEquals(0, listener.getLatch().getCount()); + } + + public static class TestClusterUpdateRequest extends ClusterManagerNodeRequest { + final private String method; + final private QueryGroup queryGroup; + + public TestClusterUpdateRequest(QueryGroup queryGroup, String method) { + this.method = method; + this.queryGroup = queryGroup; + } + + public TestClusterUpdateRequest(StreamInput in) throws IOException { + super(in); + this.method = in.readString(); + this.queryGroup = new QueryGroup(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(method); + queryGroup.writeTo(out); + } + + public QueryGroup getQueryGroup() { + return queryGroup; + } + + public String getMethod() { + return method; + } + } + + public static class TestClusterUpdateTransportAction extends TransportClusterManagerNodeAction { + public static final ActionType ACTION = new ActionType<>("internal::test_cluster_update_action", 
TestResponse::new); + + @Inject + public TestClusterUpdateTransportAction( + ThreadPool threadPool, + TransportService transportService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService + ) { + super( + ACTION.name(), + transportService, + clusterService, + threadPool, + actionFilters, + TestClusterUpdateRequest::new, + indexNameExpressionResolver + ); + } + + @Override + protected String executor() { + return SAME; + } + + @Override + protected TestResponse read(StreamInput in) throws IOException { + return new TestResponse(in); + } + + @Override + protected ClusterBlockException checkBlock(TestClusterUpdateRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected void clusterManagerOperation( + TestClusterUpdateRequest request, + ClusterState clusterState, + ActionListener listener + ) { + clusterService.submitStateUpdateTask("query-group-persistence-service", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + Map currentGroups = currentState.metadata().queryGroups(); + QueryGroup queryGroup = request.getQueryGroup(); + String id = queryGroup.get_id(); + String method = request.getMethod(); + Metadata metadata; + if (method.equals(PUT)) { // create + metadata = Metadata.builder(currentState.metadata()).put(queryGroup).build(); + } else { // delete + metadata = Metadata.builder(currentState.metadata()).remove(currentGroups.get(id)).build(); + } + return ClusterState.builder(currentState).metadata(metadata).build(); + } + + @Override + public void onFailure(String source, Exception e) { + listener.onFailure(e); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + listener.onResponse(new TestResponse()); + } + }); + } + } + + public static class TestQueryGroupTaskRequest extends 
ActionRequest { + private final String type; + private final String queryGroupId; + private TaskFactory taskFactory; + + public TestQueryGroupTaskRequest(String type, String queryGroupId, TaskFactory taskFactory) { + this.type = type; + this.queryGroupId = queryGroupId; + this.taskFactory = taskFactory; + } + + public TestQueryGroupTaskRequest(StreamInput in) throws IOException { + super(in); + this.type = in.readString(); + this.queryGroupId = in.readString(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return taskFactory.createTask(id, type, action, "", parentTaskId, headers); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(type); + out.writeString(queryGroupId); + } + + public String getType() { + return type; + } + + public String getQueryGroupId() { + return queryGroupId; + } + } + + public static class TestQueryGroupTaskTransportAction extends HandledTransportAction { + public static final ActionType ACTION = new ActionType<>("internal::test_query_group_task_action", TestResponse::new); + private final ThreadPool threadPool; + + @Inject + public TestQueryGroupTaskTransportAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters) { + super(ACTION.name(), transportService, actionFilters, TestQueryGroupTaskRequest::new); + this.threadPool = threadPool; + } + + @Override + protected void doExecute(Task task, TestQueryGroupTaskRequest request, ActionListener listener) { + threadPool.getThreadContext().putHeader(QUERY_GROUP_ID_HEADER, request.getQueryGroupId()); + threadPool.executor(ThreadPool.Names.SEARCH).execute(() -> { + try { + CancellableTask cancellableTask = (CancellableTask) task; + ((QueryGroupTask) task).setQueryGroupId(threadPool.getThreadContext()); + 
assertEquals(request.getQueryGroupId(), ((QueryGroupTask) task).getQueryGroupId()); + long startTime = System.nanoTime(); + while (System.nanoTime() - startTime < TIMEOUT.getNanos()) { + doWork(request); + if (cancellableTask.isCancelled()) { + break; + } + } + if (cancellableTask.isCancelled()) { + throw new TaskCancelledException(cancellableTask.getReasonCancelled()); + } else { + listener.onResponse(new TestResponse()); + } + } catch (Exception e) { + listener.onFailure(e); + } + }); + } + + private void doWork(TestQueryGroupTaskRequest request) throws InterruptedException { + switch (request.getType()) { + case "CPU": + long i = 0, j = 1, k = 1, iterations = 1000; + do { + j += i; + k *= j; + i++; + } while (i < iterations); + break; + case "MEMORY": + int bytesToAllocate = (int) (Runtime.getRuntime().totalMemory() * 0.01); + Byte[] bytes = new Byte[bytesToAllocate]; + int[] ints = new int[bytesToAllocate]; + break; + } + } + } + + public static class TestClusterUpdatePlugin extends Plugin implements ActionPlugin { + @Override + public List> getActions() { + return Arrays.asList( + new ActionHandler<>(TestClusterUpdateTransportAction.ACTION, TestClusterUpdateTransportAction.class), + new ActionHandler<>(TestQueryGroupTaskTransportAction.ACTION, TestQueryGroupTaskTransportAction.class) + ); + } + + @Override + public List> getClientActions() { + return Arrays.asList(TestClusterUpdateTransportAction.ACTION, TestQueryGroupTaskTransportAction.ACTION); + } + } +} From aca373b6b9d4f1afa9507874bdf64f8f9924f9fb Mon Sep 17 00:00:00 2001 From: Rishabh Singh Date: Mon, 6 Jan 2025 11:27:08 -0800 Subject: [PATCH 24/27] Add new benchmark config for nested workload (#16956) Signed-off-by: Rishabh Singh --- .github/benchmark-configs.json | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/benchmark-configs.json b/.github/benchmark-configs.json index b3590f8a2f942..1c80f5048a611 100644 --- a/.github/benchmark-configs.json +++ 
b/.github/benchmark-configs.json @@ -256,5 +256,21 @@ "data_instance_config": "4vCPU, 32G Mem, 16G Heap" }, "baseline_cluster_config": "x64-r5.xlarge-1-shard-0-replica-snapshot-baseline" + }, + "id_16": { + "description": "Benchmarking config for NESTED workload, benchmarks nested queries with inner-hits", + "supported_major_versions": ["2", "3"], + "cluster-benchmark-configs": { + "SINGLE_NODE_CLUSTER": "true", + "MIN_DISTRIBUTION": "true", + "TEST_WORKLOAD": "nested", + "WORKLOAD_PARAMS": "{\"number_of_replicas\":\"0\",\"number_of_shards\":\"1\"}", + "CAPTURE_NODE_STAT": "true" + }, + "cluster_configuration": { + "size": "Single-Node", + "data_instance_config": "4vCPU, 32G Mem, 16G Heap" + }, + "baseline_cluster_config": "x64-r5.xlarge-single-node-1-shard-0-replica-baseline" + } } -} From dd9695362a9d6db1c3ee2117c269f025155d4957 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 16:52:48 -0500 Subject: [PATCH 25/27] Bump com.azure:azure-core-http-netty from 1.15.5 to 1.15.7 in /plugins/repository-azure (#16952) * Bump com.azure:azure-core-http-netty in /plugins/repository-azure Bumps [com.azure:azure-core-http-netty](https://github.com/Azure/azure-sdk-for-java) from 1.15.5 to 1.15.7. - [Release notes](https://github.com/Azure/azure-sdk-for-java/releases) - [Commits](https://github.com/Azure/azure-sdk-for-java/compare/azure-core-http-netty_1.15.5...azure-core-http-netty_1.15.7) --- updated-dependencies: - dependency-name: com.azure:azure-core-http-netty dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/repository-azure/build.gradle | 2 +- .../licenses/azure-core-http-netty-1.15.5.jar.sha1 | 1 - .../licenses/azure-core-http-netty-1.15.7.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.15.5.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.15.7.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 99bfecfc0eac6..bcf1904db8d27 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -59,6 +59,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.microsoft.azure:msal4j` from 1.17.2 to 1.18.0 ([#16918](https://github.com/opensearch-project/OpenSearch/pull/16918)) - Bump `org.apache.commons:commons-text` from 1.12.0 to 1.13.0 ([#16919](https://github.com/opensearch-project/OpenSearch/pull/16919)) - Bump `ch.qos.logback:logback-core` from 1.5.12 to 1.5.16 ([#16951](https://github.com/opensearch-project/OpenSearch/pull/16951)) +- Bump `com.azure:azure-core-http-netty` from 1.15.5 to 1.15.7 ([#16952](https://github.com/opensearch-project/OpenSearch/pull/16952)) ### Changed - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391) diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 03ea07623dbaf..ad12ec9003e64 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -48,7 +48,7 @@ dependencies { api 'com.azure:azure-json:1.3.0' api 'com.azure:azure-xml:1.1.0' api 'com.azure:azure-storage-common:12.28.0' - api 
'com.azure:azure-core-http-netty:1.15.5' + api 'com.azure:azure-core-http-netty:1.15.7' api "io.netty:netty-codec-dns:${versions.netty}" api "io.netty:netty-codec-socks:${versions.netty}" api "io.netty:netty-codec-http2:${versions.netty}" diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.5.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.5.jar.sha1 deleted file mode 100644 index 2f5239cc26148..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -44d99705d3759e2ad7ee8110f811d4ed304a6a7c \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.7.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.7.jar.sha1 new file mode 100644 index 0000000000000..d72f835c69903 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.7.jar.sha1 @@ -0,0 +1 @@ +a83247eeeb7f63f891e725228d54c3c24132c66a \ No newline at end of file From 0b365998ed6e4f537dbdf7983a077bc53e785bb9 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Mon, 6 Jan 2025 16:23:59 -0800 Subject: [PATCH 26/27] Always use constant_score query for match_only_text (#16964) In some cases, when we create a term query over a `match_only_text` field, it may still try to compute scores, which prevents early termination. We should *always* use a constant score query when querying `match_only_text`, since we don't have the statistics required to compute scores. 
--------- Signed-off-by: Michael Froh --- CHANGELOG.md | 1 + .../mapper/MatchOnlyTextFieldMapper.java | 11 +++++++++ .../mapper/MatchOnlyTextFieldMapperTests.java | 23 ++++++++++++++++++- .../mapper/MatchOnlyTextFieldTypeTests.java | 18 +++++++++++++++ 4 files changed, 52 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bcf1904db8d27..1b49368a20fa8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -87,6 +87,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Skip remote-repositories validations for node-joins when RepositoriesService is not in sync with cluster-state ([#16763](https://github.com/opensearch-project/OpenSearch/pull/16763)) - Fix _list/shards API failing when closed indices are present ([#16606](https://github.com/opensearch-project/OpenSearch/pull/16606)) - Fix remote shards balance ([#15335](https://github.com/opensearch-project/OpenSearch/pull/15335)) +- Always use `constant_score` query for `match_only_text` field ([#16964](https://github.com/opensearch-project/OpenSearch/pull/16964)) ### Security diff --git a/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java index fb97f8c309a70..757de65248d33 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; @@ -290,6 +291,16 @@ public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, return new SourceFieldMatchQuery(builder.build(), phrasePrefixQuery, this, context); } 
+ @Override + public Query termQuery(Object value, QueryShardContext context) { + return new ConstantScoreQuery(super.termQuery(value, context)); + } + + @Override + public Query termQueryCaseInsensitive(Object value, QueryShardContext context) { + return new ConstantScoreQuery(super.termQueryCaseInsensitive(value, context)); + } + private List> getTermsFromTokenStream(TokenStream stream) throws IOException { final List> termArray = new ArrayList<>(); TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); diff --git a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java index 580f8cccc9af5..d9f0fd6657085 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java @@ -15,11 +15,13 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; +import org.opensearch.common.lucene.search.AutomatonQueries; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -28,6 +30,7 @@ import org.opensearch.index.query.MatchPhraseQueryBuilder; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.query.SourceFieldMatchQuery; +import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.index.search.MatchQuery; import org.junit.Before; @@ -391,7 +394,7 @@ public void testPhraseQuery() throws IOException { 
assertThat(q, is(expectedQuery)); Query q4 = new MatchPhraseQueryBuilder("field", "singleton").toQuery(queryShardContext); - assertThat(q4, is(new TermQuery(new Term("field", "singleton")))); + assertThat(q4, is(new ConstantScoreQuery(new TermQuery(new Term("field", "singleton"))))); Query q2 = new MatchPhraseQueryBuilder("field", "three words here").toQuery(queryShardContext); expectedQuery = new SourceFieldMatchQuery( @@ -447,4 +450,22 @@ public void testPhraseQuery() throws IOException { ); assertThat(q6, is(expectedQuery)); } + + public void testTermQuery() throws Exception { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("field"); + { + b.field("type", textFieldName); + b.field("analyzer", "my_stop_analyzer"); // "standard" will be replaced with MockSynonymAnalyzer + } + b.endObject(); + })); + QueryShardContext queryShardContext = createQueryShardContext(mapperService); + + Query q = new TermQueryBuilder("field", "foo").rewrite(queryShardContext).toQuery(queryShardContext); + assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field", "foo"))), q); + + q = new TermQueryBuilder("field", "foo").caseInsensitive(true).rewrite(queryShardContext).toQuery(queryShardContext); + assertEquals(new ConstantScoreQuery(AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "foo"))), q); + } } diff --git a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java index 51234fa04ddc2..0170cdde8b21c 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java @@ -8,7 +8,11 @@ package org.opensearch.index.mapper; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.TermQuery; import org.opensearch.common.lucene.Lucene; +import 
org.opensearch.common.lucene.search.AutomatonQueries; public class MatchOnlyTextFieldTypeTests extends TextFieldTypeTests { @@ -28,4 +32,18 @@ TextFieldMapper.TextFieldType createFieldType(boolean searchable) { ParametrizedFieldMapper.Parameter.metaParam().get() ); } + + @Override + public void testTermQuery() { + MappedFieldType ft = createFieldType(true); + assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field", "foo"))), ft.termQuery("foo", null)); + assertEquals( + new ConstantScoreQuery(AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo"))), + ft.termQueryCaseInsensitive("fOo", null) + ); + + MappedFieldType unsearchable = createFieldType(false); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("bar", null)); + assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + } } From e7e19f712596ca0ca0531ff5c39663cc472fc95f Mon Sep 17 00:00:00 2001 From: expani1729 <110471048+expani@users.noreply.github.com> Date: Mon, 6 Jan 2025 16:53:05 -0800 Subject: [PATCH 27/27] Changes to support unmapped fields in metric aggregation (#16481) Avoids exception when querying unmapped field when star tree experimental feature is enabled.
--------- Signed-off-by: expani --- .../startree/utils/StarTreeQueryHelper.java | 2 +- .../ValuesSourceAggregatorFactory.java | 2 +- .../startree/MetricAggregatorTests.java | 139 ++++++++++++++++++ .../startree/StarTreeFilterTests.java | 13 +- .../aggregations/AggregatorTestCase.java | 22 ++- 5 files changed, 172 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeQueryHelper.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeQueryHelper.java index e538be5d5bece..e46cf6f56b36e 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeQueryHelper.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeQueryHelper.java @@ -152,7 +152,7 @@ private static MetricStat validateStarTreeMetricSupport( MetricStat metricStat = ((MetricAggregatorFactory) aggregatorFactory).getMetricStat(); field = ((MetricAggregatorFactory) aggregatorFactory).getField(); - if (supportedMetrics.containsKey(field) && supportedMetrics.get(field).contains(metricStat)) { + if (field != null && supportedMetrics.containsKey(field) && supportedMetrics.get(field).contains(metricStat)) { return metricStat; } } diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSourceAggregatorFactory.java index d862b2c2784de..41344fd06cbbc 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSourceAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSourceAggregatorFactory.java @@ -104,6 +104,6 @@ public String getStatsSubtype() { } public String getField() { - return config.fieldContext().field(); + return config.fieldContext() != null ? 
config.fieldContext().field() : null; } } diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java index 12e83cbbadd5d..05f48eb9243af 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/MetricAggregatorTests.java @@ -28,18 +28,27 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.FeatureFlags; +import org.opensearch.common.util.MockBigArrays; +import org.opensearch.common.util.MockPageCacheRecycler; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; import org.opensearch.index.codec.composite.composite912.Composite912Codec; import org.opensearch.index.codec.composite912.datacube.startree.StarTreeDocValuesFormatTests; import org.opensearch.index.compositeindex.datacube.Dimension; +import org.opensearch.index.compositeindex.datacube.Metric; +import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.AggregatorFactories; +import org.opensearch.search.aggregations.AggregatorFactory; import 
org.opensearch.search.aggregations.AggregatorTestCase; import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder; @@ -49,14 +58,17 @@ import org.opensearch.search.aggregations.metrics.InternalSum; import org.opensearch.search.aggregations.metrics.InternalValueCount; import org.opensearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.opensearch.search.aggregations.metrics.MetricAggregatorFactory; import org.opensearch.search.aggregations.metrics.MinAggregationBuilder; import org.opensearch.search.aggregations.metrics.SumAggregationBuilder; import org.opensearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import org.opensearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Random; @@ -69,6 +81,8 @@ import static org.opensearch.search.aggregations.AggregationBuilders.min; import static org.opensearch.search.aggregations.AggregationBuilders.sum; import static org.opensearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class MetricAggregatorTests extends AggregatorTestCase { @@ -267,6 +281,110 @@ public void testStarTreeDocValues() throws IOException { ); } + CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); + + QueryShardContext queryShardContext = queryShardContextMock( + indexSearcher, + mapperServiceMock(), + createIndexSettings(), + circuitBreakerService, + new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), circuitBreakerService).withCircuitBreaking() + ); + + MetricAggregatorFactory aggregatorFactory = mock(MetricAggregatorFactory.class); + 
when(aggregatorFactory.getSubFactories()).thenReturn(AggregatorFactories.EMPTY); + when(aggregatorFactory.getField()).thenReturn(FIELD_NAME); + when(aggregatorFactory.getMetricStat()).thenReturn(MetricStat.SUM); + + // Case when field and metric type in aggregation are fully supported by star tree. + testCase( + indexSearcher, + query, + queryBuilder, + sumAggregationBuilder, + starTree, + supportedDimensions, + List.of(new Metric(FIELD_NAME, List.of(MetricStat.SUM, MetricStat.MAX, MetricStat.MIN, MetricStat.AVG))), + verifyAggregation(InternalSum::getValue), + aggregatorFactory, + true + ); + + // Case when the field is not supported by star tree + SumAggregationBuilder invalidFieldSumAggBuilder = sum("_name").field("hello"); + testCase( + indexSearcher, + query, + queryBuilder, + invalidFieldSumAggBuilder, + starTree, + supportedDimensions, + Collections.emptyList(), + verifyAggregation(InternalSum::getValue), + invalidFieldSumAggBuilder.build(queryShardContext, null), + false // Invalid fields will return null StarTreeQueryContext which will not cause early termination by leaf collector + ); + + // Case when metric type in aggregation is not supported by star tree but the field is supported. 
+ testCase( + indexSearcher, + query, + queryBuilder, + sumAggregationBuilder, + starTree, + supportedDimensions, + List.of(new Metric(FIELD_NAME, List.of(MetricStat.MAX, MetricStat.MIN, MetricStat.AVG))), + verifyAggregation(InternalSum::getValue), + aggregatorFactory, + false + ); + + // Case when field is not present in supported metrics + testCase( + indexSearcher, + query, + queryBuilder, + sumAggregationBuilder, + starTree, + supportedDimensions, + List.of(new Metric("hello", List.of(MetricStat.MAX, MetricStat.MIN, MetricStat.AVG))), + verifyAggregation(InternalSum::getValue), + aggregatorFactory, + false + ); + + AggregatorFactories aggregatorFactories = mock(AggregatorFactories.class); + when(aggregatorFactories.getFactories()).thenReturn(new AggregatorFactory[] { mock(MetricAggregatorFactory.class) }); + when(aggregatorFactory.getSubFactories()).thenReturn(aggregatorFactories); + + // Case when sub aggregations are present + testCase( + indexSearcher, + query, + queryBuilder, + sumAggregationBuilder, + starTree, + supportedDimensions, + List.of(new Metric("hello", List.of(MetricStat.MAX, MetricStat.MIN, MetricStat.AVG))), + verifyAggregation(InternalSum::getValue), + aggregatorFactory, + false + ); + + // Case when aggregation factory is not metric aggregation + testCase( + indexSearcher, + query, + queryBuilder, + sumAggregationBuilder, + starTree, + supportedDimensions, + List.of(new Metric("hello", List.of(MetricStat.MAX, MetricStat.MIN, MetricStat.AVG))), + verifyAggregation(InternalSum::getValue), + mock(ValuesSourceAggregatorFactory.class), + false + ); + ir.close(); directory.close(); } @@ -287,6 +405,21 @@ private void testC CompositeIndexFieldInfo starTree, List supportedDimensions, BiConsumer verify + ) throws IOException { + testCase(searcher, query, queryBuilder, aggBuilder, starTree, supportedDimensions, Collections.emptyList(), verify, null, true); + } + + private void testCase( + IndexSearcher searcher, + Query query, + QueryBuilder 
queryBuilder, + T aggBuilder, + CompositeIndexFieldInfo starTree, + List supportedDimensions, + List supportedMetrics, + BiConsumer verify, + AggregatorFactory aggregatorFactory, + boolean assertCollectorEarlyTermination ) throws IOException { V starTreeAggregation = searchAndReduceStarTree( createIndexSettings(), @@ -296,8 +429,11 @@ private void testC aggBuilder, starTree, supportedDimensions, + supportedMetrics, DEFAULT_MAX_BUCKETS, false, + aggregatorFactory, + assertCollectorEarlyTermination, DEFAULT_MAPPED_FIELD ); V expectedAggregation = searchAndReduceStarTree( @@ -308,8 +444,11 @@ private void testC aggBuilder, null, null, + null, DEFAULT_MAX_BUCKETS, false, + aggregatorFactory, + assertCollectorEarlyTermination, DEFAULT_MAPPED_FIELD ); verify.accept(expectedAggregation, starTreeAggregation); diff --git a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java index b03cb5ac7bb9d..c1cb19b9576e4 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/startree/StarTreeFilterTests.java @@ -87,7 +87,8 @@ public void testStarTreeFilterWithDocsInSVDFieldButNoStarNode() throws IOExcepti testStarTreeFilter(10, false); } - private void testStarTreeFilter(int maxLeafDoc, boolean skipStarNodeCreationForSDVDimension) throws IOException { + private Directory createStarTreeIndex(int maxLeafDoc, boolean skipStarNodeCreationForSDVDimension, List docs) + throws IOException { Directory directory = newDirectory(); IndexWriterConfig conf = newIndexWriterConfig(null); conf.setCodec(getCodec(maxLeafDoc, skipStarNodeCreationForSDVDimension)); @@ -95,7 +96,6 @@ private void testStarTreeFilter(int maxLeafDoc, boolean skipStarNodeCreationForS RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf); int totalDocs = 100; - List docs = 
new ArrayList<>(); for (int i = 0; i < totalDocs; i++) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField(SNDV, i)); @@ -110,6 +110,15 @@ private void testStarTreeFilter(int maxLeafDoc, boolean skipStarNodeCreationForS } iw.forceMerge(1); iw.close(); + return directory; + } + + private void testStarTreeFilter(int maxLeafDoc, boolean skipStarNodeCreationForSDVDimension) throws IOException { + List docs = new ArrayList<>(); + + Directory directory = createStarTreeIndex(maxLeafDoc, skipStarNodeCreationForSDVDimension, docs); + + int totalDocs = docs.size(); DirectoryReader ir = DirectoryReader.open(directory); initValuesSourceRegistry(); diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index e1728c4476699..27142b298db52 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -93,6 +93,7 @@ import org.opensearch.index.cache.query.DisabledQueryCache; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.compositeindex.datacube.Dimension; +import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.startree.utils.StarTreeQueryHelper; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexFieldDataCache; @@ -348,7 +349,9 @@ protected CountingAggregator createCountingAggregator( IndexSettings indexSettings, CompositeIndexFieldInfo starTree, List supportedDimensions, + List supportedMetrics, MultiBucketConsumer bucketConsumer, + AggregatorFactory aggregatorFactory, MappedFieldType... 
fieldTypes ) throws IOException { SearchContext searchContext; @@ -360,7 +363,9 @@ protected CountingAggregator createCountingAggregator( queryBuilder, starTree, supportedDimensions, + supportedMetrics, bucketConsumer, + aggregatorFactory, fieldTypes ); } else { @@ -389,7 +394,9 @@ protected SearchContext createSearchContextWithStarTreeContext( QueryBuilder queryBuilder, CompositeIndexFieldInfo starTree, List supportedDimensions, + List supportedMetrics, MultiBucketConsumer bucketConsumer, + AggregatorFactory aggregatorFactory, MappedFieldType... fieldTypes ) throws IOException { SearchContext searchContext = createSearchContext( @@ -406,7 +413,12 @@ protected SearchContext createSearchContextWithStarTreeContext( AggregatorFactories aggregatorFactories = mock(AggregatorFactories.class); when(searchContext.aggregations()).thenReturn(searchContextAggregations); when(searchContextAggregations.factories()).thenReturn(aggregatorFactories); - when(aggregatorFactories.getFactories()).thenReturn(new AggregatorFactory[] {}); + + if (aggregatorFactory != null) { + when(aggregatorFactories.getFactories()).thenReturn(new AggregatorFactory[] { aggregatorFactory }); + } else { + when(aggregatorFactories.getFactories()).thenReturn(new AggregatorFactory[] {}); + } CompositeDataCubeFieldType compositeMappedFieldType = mock(CompositeDataCubeFieldType.class); when(compositeMappedFieldType.name()).thenReturn(starTree.getField()); @@ -414,6 +426,7 @@ protected SearchContext createSearchContextWithStarTreeContext( Set compositeFieldTypes = Set.of(compositeMappedFieldType); when((compositeMappedFieldType).getDimensions()).thenReturn(supportedDimensions); + when((compositeMappedFieldType).getMetrics()).thenReturn(supportedMetrics); MapperService mapperService = mock(MapperService.class); when(mapperService.getCompositeFieldTypes()).thenReturn(compositeFieldTypes); when(searchContext.mapperService()).thenReturn(mapperService); @@ -740,8 +753,11 @@ protected A searchAndReduc 
AggregationBuilder builder, CompositeIndexFieldInfo compositeIndexFieldInfo, List supportedDimensions, + List supportedMetrics, int maxBucket, boolean hasNested, + AggregatorFactory aggregatorFactory, + boolean assertCollectorEarlyTermination, MappedFieldType... fieldTypes ) throws IOException { query = query.rewrite(searcher); @@ -764,7 +780,9 @@ protected A searchAndReduc indexSettings, compositeIndexFieldInfo, supportedDimensions, + supportedMetrics, bucketConsumer, + aggregatorFactory, fieldTypes ); @@ -772,7 +790,7 @@ protected A searchAndReduc searcher.search(query, countingAggregator); countingAggregator.postCollection(); aggs.add(countingAggregator.buildTopLevel()); - if (compositeIndexFieldInfo != null) { + if (compositeIndexFieldInfo != null && assertCollectorEarlyTermination) { assertEquals(0, countingAggregator.collectCounter.get()); }