Commit d4ac7fc: Merge branch 'main' of github.com:opensearch-project/OpenSearch into allindexingcommits

bharath-techie committed Aug 24, 2024
2 parents: 6674064 + 2301adf

Showing 51 changed files with 499 additions and 88 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -46,6 +46,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Bump `com.azure:azure-core-http-netty` from 1.15.1 to 1.15.3 ([#15300](https://github.com/opensearch-project/OpenSearch/pull/15300))
- Bump `com.gradle.develocity` from 3.17.6 to 3.18 ([#15297](https://github.com/opensearch-project/OpenSearch/pull/15297))
- Bump `commons-cli:commons-cli` from 1.8.0 to 1.9.0 ([#15298](https://github.com/opensearch-project/OpenSearch/pull/15298))
- Bump `opentelemetry` from 1.40.0 to 1.41.0 ([#15361](https://github.com/opensearch-project/OpenSearch/pull/15361))
- Bump `opentelemetry-semconv` from 1.26.0-alpha to 1.27.0-alpha ([#15361](https://github.com/opensearch-project/OpenSearch/pull/15361))

### Changed
- Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979))
@@ -65,6 +67,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fixed array field name omission in flat_object function for nested JSON ([#13620](https://github.com/opensearch-project/OpenSearch/pull/13620))
- Fix range aggregation optimization ignoring top level queries ([#15194](https://github.com/opensearch-project/OpenSearch/pull/15194))
- Fix incorrect parameter names in MinHash token filter configuration handling ([#15233](https://github.com/opensearch-project/OpenSearch/pull/15233))
- Fix split response processor not included in allowlist ([#15393](https://github.com/opensearch-project/OpenSearch/pull/15393))

### Security

4 changes: 2 additions & 2 deletions buildSrc/version.properties
@@ -74,5 +74,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final

# opentelemetry dependencies
- opentelemetry = 1.40.0
- opentelemetrysemconv = 1.26.0-alpha
+ opentelemetry = 1.41.0
+ opentelemetrysemconv = 1.27.0-alpha
@@ -97,6 +97,8 @@ public Map<String, Processor.Factory<SearchResponseProcessor>> getResponseProcessors(
new TruncateHitsResponseProcessor.Factory(),
CollapseResponseProcessor.TYPE,
new CollapseResponseProcessor.Factory(),
SplitResponseProcessor.TYPE,
new SplitResponseProcessor.Factory(),
SortResponseProcessor.TYPE,
new SortResponseProcessor.Factory()
)
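For context, a minimal sketch of the registration pattern used in the hunk above, assuming (as the alternating TYPE/Factory arguments suggest) that the plugin collects response processors into a map keyed by processor type. All names below are illustrative, not code from this commit:

import java.util.Map;

// Hypothetical sketch of the TYPE -> Factory registration pattern: each response
// processor contributes its type key plus a factory, and the plugin exposes the
// pairs as one map (Map.of takes alternating key/value varargs, matching the
// layout of the diff above).
final class ResponseProcessorRegistrySketch {
    interface Factory {
        // would build a response processor from pipeline configuration
    }

    static final class SplitFactory implements Factory {}

    static final class SortFactory implements Factory {}

    static Map<String, Factory> responseProcessors() {
        return Map.of("split", new SplitFactory(), "sort", new SortFactory());
    }
}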
@@ -47,6 +47,7 @@ public void testResponseProcessorAllowlist() throws IOException {
List.of("rename_field", "truncate_hits", "collapse"),
SearchPipelineCommonModulePlugin::getResponseProcessors
);
runAllowlistTest(key, List.of("split", "sort"), SearchPipelineCommonModulePlugin::getResponseProcessors);

final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
@@ -82,7 +83,7 @@ public void testAllowlistNotSpecified() throws IOException {
try (SearchPipelineCommonModulePlugin plugin = new SearchPipelineCommonModulePlugin()) {
assertEquals(Set.of("oversample", "filter_query", "script"), plugin.getRequestProcessors(createParameters(settings)).keySet());
assertEquals(
Set.of("rename_field", "truncate_hits", "collapse", "sort"),
Set.of("rename_field", "truncate_hits", "collapse", "split", "sort"),
plugin.getResponseProcessors(createParameters(settings)).keySet()
);
assertEquals(Set.of(), plugin.getSearchPhaseResultsProcessors(createParameters(settings)).keySet());
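The behavior these tests exercise can be summarized with a small sketch. The helper name and shape are hypothetical (the plugin's real filtering code is not shown in this diff), but the idea matches the assertions above: only factories whose type keys appear in the configured allowlist are exposed, and an allowlist naming an unknown processor type is rejected with an IllegalArgumentException.

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical allowlist filter: validate the configured keys, then keep only
// the processor factories whose type keys are present in the allowlist.
final class AllowlistFilterSketch {
    static <T> Map<String, T> filterByAllowlist(Map<String, T> factories, Set<String> allowlist) {
        for (String key : allowlist) {
            if (!factories.containsKey(key)) {
                throw new IllegalArgumentException("Invalid processor type [" + key + "]");
            }
        }
        return factories.entrySet()
            .stream()
            .filter(entry -> allowlist.contains(entry.getKey()))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }
}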
4 changes: 3 additions & 1 deletion plugins/telemetry-otel/build.gradle
@@ -86,7 +86,9 @@ thirdPartyAudit {
'io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider',
'kotlin.io.path.PathsKt',
'io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider',
- 'io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener'
+ 'io.opentelemetry.sdk.autoconfigure.spi.internal.AutoConfigureListener',
+ 'io.opentelemetry.sdk.autoconfigure.spi.internal.ComponentProvider',
+ 'io.opentelemetry.sdk.autoconfigure.spi.internal.StructuredConfigProperties'
)
}


The next set of changes replaces jar checksum files: 14 single-line .sha1 files were deleted and 14 new ones added (presumably the telemetry-otel checksums refreshed for the OpenTelemetry 1.41.0 and semconv 1.27.0-alpha artifacts; the file names are not shown in this view). The new checksums are:

ec5ad3b420c9fba4b340e85a3199fd0f2accd023
fd387313cc37a6e93062e9a80a2526634d22cb19
3d7cf15ef425053e24e825160ca7b4ac08d721aa
cf92f4c1b60c2359c12f6f323f6a2a623c333910
8dee21440b811004ecc1c36c1cd44f9d3494546c
d86e60b6d49e389ebe5797d42a7288a20d30c162
aeba3075b8dfd97779edadc0a3711d999bb0e396
368d7905d6a0a313c63e3a91f895a3a08500519e
c740e8f7d0d914d6acd310ac53901bb8753c6e8d
b820861f85ba83db0ad896c47f723208d7473d5a
f88ee292f5605c87dfe85c8d90131bce9f0b3b8e
9d1200befb28e3e9f61073ac3de23cc55e509dc7
d9bbc2e2e800317d72fbf3141ae8391e95fa6229
906d916bee46f60260c09314284b5948c54a0662
@@ -328,11 +328,9 @@ public void testValidCompositeIndex() {
assertEquals(2, starTreeFieldType.getMetrics().size());
assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField());
List<MetricStat> expectedMetrics = Arrays.asList(
- MetricStat.AVG,
- MetricStat.COUNT,
+ MetricStat.VALUE_COUNT,
MetricStat.SUM,
- MetricStat.MAX,
- MetricStat.MIN
+ MetricStat.AVG
);
assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics());

@@ -376,11 +374,9 @@ public void testValidCompositeIndexWithDates() {
assertEquals("numeric_dv", starTreeFieldType.getDimensions().get(1).getField());
assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField());
List<MetricStat> expectedMetrics = Arrays.asList(
- MetricStat.AVG,
- MetricStat.COUNT,
+ MetricStat.VALUE_COUNT,
MetricStat.SUM,
- MetricStat.MAX,
- MetricStat.MIN
+ MetricStat.AVG
);
assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics());
assertEquals(10000, starTreeFieldType.getStarTreeConfig().maxLeafDocs());
@@ -34,6 +34,12 @@

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.opensearch.action.admin.cluster.health.ClusterHealthResponse;
import org.opensearch.action.admin.cluster.node.stats.NodeStats;
import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse;
@@ -56,7 +62,10 @@
import org.opensearch.env.NodeEnvironment;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.cache.request.RequestCacheStats;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.index.query.TermQueryBuilder;
import org.opensearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.opensearch.search.aggregations.bucket.histogram.Histogram;
@@ -65,6 +74,7 @@
import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
import org.opensearch.test.hamcrest.OpenSearchAssertions;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.ZoneId;
@@ -768,6 +778,59 @@ public void testDeleteAndCreateSameIndexShardOnSameNode() throws Exception {
assertTrue(stats.getMemorySizeInBytes() == 0);
}

public void testTimedOutQuery() throws Exception {
// A timed out query should be cached and then invalidated
Client client = client();
String index = "index";
assertAcked(
client.admin()
.indices()
.prepareCreate(index)
.setMapping("k", "type=keyword")
.setSettings(
Settings.builder()
.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
// Disable index refreshing to avoid cache being invalidated mid-test
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(-1))
)
.get()
);
indexRandom(true, client.prepareIndex(index).setSource("k", "hello"));
ensureSearchable(index);
// Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
forceMerge(client, index);

QueryBuilder timeoutQueryBuilder = new TermQueryBuilder("k", "hello") {
@Override
protected Query doToQuery(QueryShardContext context) {
return new TermQuery(new Term("k", "hello")) {
@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
// Create the weight before sleeping. Otherwise, TermStates.build() (in the call to super.createWeight()) will
// sometimes throw an exception on timeout, rather than timing out gracefully.
Weight result = super.createWeight(searcher, scoreMode, boost);
try {
Thread.sleep(500);
} catch (InterruptedException ignored) {}
return result;
}
};
}
};

SearchResponse resp = client.prepareSearch(index)
.setRequestCache(true)
.setQuery(timeoutQueryBuilder)
.setTimeout(TimeValue.ZERO)
.get();
assertTrue(resp.isTimedOut());
RequestCacheStats requestCacheStats = getRequestCacheStats(client, index);
// The cache should be empty as the timed-out query was invalidated
assertEquals(0, requestCacheStats.getMemorySizeInBytes());
}

private Path[] shardDirectory(String server, Index index, int shard) {
NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
final Path[] paths = env.availableShardPaths(new ShardId(index, shard));
@@ -11,6 +11,7 @@
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.SegmentWriteState;

import java.io.Closeable;
import java.io.IOException;

/**
@@ -20,7 +21,7 @@
*
* @opensearch.experimental
*/
- public class Lucene90DocValuesConsumerWrapper {
+ public class Lucene90DocValuesConsumerWrapper implements Closeable {

private final Lucene90DocValuesConsumer lucene90DocValuesConsumer;

@@ -37,4 +38,9 @@ public Lucene90DocValuesConsumerWrapper(
public Lucene90DocValuesConsumer getLucene90DocValuesConsumer() {
return lucene90DocValuesConsumer;
}

@Override
public void close() throws IOException {
lucene90DocValuesConsumer.close();
}
}
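Since the wrapper now implements Closeable, callers can scope it with try-with-resources. A usage sketch under assumed context (the SegmentWriteState and codec/extension names come from the caller; this is illustrative, not code from this commit):

import java.io.IOException;

import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.SegmentWriteState;

// Hypothetical usage: try-with-resources now guarantees the underlying
// Lucene90DocValuesConsumer is closed even if writing doc values throws.
final class ConsumerUsageSketch {
    static void writeDocValues(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
        try (Lucene90DocValuesConsumerWrapper wrapper = new Lucene90DocValuesConsumerWrapper(state, dataCodec, dataExtension, metaCodec, metaExtension)) {
            DocValuesConsumer consumer = wrapper.getLucene90DocValuesConsumer();
            // ... hand the consumer to the doc-values writing code ...
        }
    }
}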
@@ -10,8 +10,8 @@

import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.SegmentReadState;
- import org.opensearch.index.codec.composite.DocValuesProvider;

+ import java.io.Closeable;
import java.io.IOException;

/**
@@ -21,7 +21,7 @@
*
* @opensearch.experimental
*/
- public class Lucene90DocValuesProducerWrapper implements DocValuesProvider {
+ public class Lucene90DocValuesProducerWrapper implements Closeable {

private final Lucene90DocValuesProducer lucene90DocValuesProducer;

@@ -35,9 +35,12 @@ public Lucene90DocValuesProducerWrapper(
lucene90DocValuesProducer = new Lucene90DocValuesProducer(state, dataCodec, dataExtension, metaCodec, metaExtension);
}

- @Override
- public DocValuesProducer getDocValuesProducer() {
+ public DocValuesProducer getLucene90DocValuesProducer() {
return lucene90DocValuesProducer;
}

@Override
public void close() throws IOException {
lucene90DocValuesProducer.close();
}
}
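The producer side mirrors this. A sketch under the same assumptions (including the assumption that the wrapper's constructor takes the same state/codec/extension arguments it forwards to Lucene90DocValuesProducer), also showing the renamed accessor:

import java.io.IOException;

import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.SegmentReadState;

// Hypothetical usage: the read-side wrapper gets the same Closeable treatment,
// so the producer's lifetime can be scoped with try-with-resources too.
final class ProducerUsageSketch {
    static void readDocValues(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
        try (Lucene90DocValuesProducerWrapper wrapper = new Lucene90DocValuesProducerWrapper(state, dataCodec, dataExtension, metaCodec, metaExtension)) {
            DocValuesProducer producer = wrapper.getLucene90DocValuesProducer();
            // ... read doc values through the producer ...
        }
    }
}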
@@ -30,6 +30,10 @@
*/
@ExperimentalApi
public class CompositeCodecFactory {

// we can use this to track the latest composite codec
public static final String COMPOSITE_CODEC = Composite99Codec.COMPOSITE_INDEX_CODEC_NAME;

public CompositeCodecFactory() {}

public Map<String, Codec> getCompositeIndexCodecs(MapperService mapperService, Logger logger) {
@@ -14,33 +14,34 @@

import java.io.IOException;

- import static org.opensearch.index.codec.composite.composite99.Composite99Codec.COMPOSITE_INDEX_CODEC_NAME;

/**
* A factory class that provides a factory method for creating {@link DocValuesConsumer} instances
- * based on the specified composite codec.
+ * for the latest composite codec.
* <p>
+ * The segments are written using the latest composite codec. The codec
+ * internally manages calling the appropriate consumer factory for its abstractions.
+ * <p>
* This design ensures forward compatibility for writing operations
*
* @opensearch.experimental
*/
public class LuceneDocValuesConsumerFactory {

public static DocValuesConsumer getDocValuesConsumerForCompositeCodec(
- String compositeCodec,
SegmentWriteState state,
String dataCodec,
String dataExtension,
String metaCodec,
String metaExtension
) throws IOException {

- switch (compositeCodec) {
- case COMPOSITE_INDEX_CODEC_NAME:
- return new Lucene90DocValuesConsumerWrapper(state, dataCodec, dataExtension, metaCodec, metaExtension)
- .getLucene90DocValuesConsumer();
- default:
- throw new IllegalStateException("Invalid composite codec " + "[" + compositeCodec + "]");
- }

+ Lucene90DocValuesConsumerWrapper lucene90DocValuesConsumerWrapper = new Lucene90DocValuesConsumerWrapper(
+ state,
+ dataCodec,
+ dataExtension,
+ metaCodec,
+ metaExtension
+ );
+ return lucene90DocValuesConsumerWrapper.getLucene90DocValuesConsumer();
}

}
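With the compositeCodec parameter removed, call sites simplify accordingly. A hypothetical call-site sketch (caller context assumed, not code from this commit):

import java.io.IOException;

import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.SegmentWriteState;

// Hypothetical call site: callers no longer select a codec by name; the factory
// always wires up the consumer for the latest composite codec.
final class FactoryCallSiteSketch {
    static DocValuesConsumer newCompositeConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
        return LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec(state, dataCodec, dataExtension, metaCodec, metaExtension);
    }
}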
(Diff truncated: the remaining changed files of the 51 total are not shown in this view.)