From 34da6eae3e086e3c01f35ce2c1ac1d93fa3befc0 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Tue, 12 Apr 2022 22:28:46 +0530 Subject: [PATCH 01/19] fixing ci, adding tests and java docs Signed-off-by: Bharathwaj G --- .../action/search/CreatePITRequest.java | 2 +- .../action/search/CreatePITResponse.java | 1 + .../action/search/SearchContextId.java | 2 +- .../search/TransportCreatePITAction.java | 3 +- .../action/search/RestCreatePITAction.java | 4 +- .../org/opensearch/search/SearchService.java | 10 +- .../opensearch/search/PitMultiNodeTests.java | 83 +++++++- .../opensearch/search/PitSingleNodeTests.java | 189 ++++++++++++++++++ 8 files changed, 284 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java b/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java index 4ae17c0b0cde1..51d263d7ea856 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java @@ -122,7 +122,7 @@ public void setIndicesOptions(IndicesOptions indicesOptions) { public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (keepAlive == null) { - validationException = addValidationError("Keep alive is missing", validationException); + validationException = addValidationError("keep alive not specified", validationException); } return validationException; } diff --git a/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java b/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java index 48e1ee936871f..64419e785a838 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java @@ -56,6 +56,7 @@ public CreatePITResponse(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + builder.field("id", id); RestActions.buildBroadcastShardsHeader( builder, params, diff --git a/server/src/main/java/org/opensearch/action/search/SearchContextId.java b/server/src/main/java/org/opensearch/action/search/SearchContextId.java index 59ebb128b924a..fe3e75b122cf6 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchContextId.java +++ b/server/src/main/java/org/opensearch/action/search/SearchContextId.java @@ -111,7 +111,7 @@ public static SearchContextId decode(NamedWriteableRegistry namedWriteableRegist } return new SearchContextId(Collections.unmodifiableMap(shards), Collections.unmodifiableMap(aliasFilters)); } catch (IOException e) { - throw new IllegalArgumentException(e); + throw new IllegalArgumentException("Cannot parse pit id", e); } } diff --git a/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java b/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java index c2d7234483988..9d81951de411c 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java @@ -8,6 +8,7 @@ package org.opensearch.action.search; +import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.support.ActionFilters; @@ -175,7 +176,7 @@ public void executeUpdatePitId( groupedActionListener ); } catch 
(Exception e) { - groupedActionListener.onFailure(e); + groupedActionListener.onFailure(new OpenSearchException("Create pit failed on node[" + node + "]", e)); } } }, updatePitIdListener::onFailure); diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestCreatePITAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestCreatePITAction.java index 82ed24d95bc4f..7f2a800d134b4 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestCreatePITAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestCreatePITAction.java @@ -38,14 +38,12 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client boolean allowPartialPitCreation = request.paramAsBoolean("allow_partial_pit_creation", true); String[] indices = Strings.splitStringByCommaToArray(request.param("index")); TimeValue keepAlive = request.paramAsTime("keep_alive", null); - CreatePITRequest createPitRequest = new CreatePITRequest(request.paramAsTime("keep_alive", null), allowPartialPitCreation, indices); - ActionRequestValidationException validationException = null; if (keepAlive == null) { validationException = addValidationError("Keep alive cannot be empty", validationException); } ExceptionsHelper.reThrowIfNotNull(validationException); - + CreatePITRequest createPitRequest = new CreatePITRequest(keepAlive, allowPartialPitCreation, indices); createPitRequest.setIndicesOptions(IndicesOptions.fromRequest(request, createPitRequest.indicesOptions())); createPitRequest.setPreference(request.param("preference")); createPitRequest.setRouting(request.param("routing")); diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 1a0aa77a65a3b..415ba9fe20a00 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -323,7 +323,7 @@ public SearchService( .addSettingsUpdateConsumer( DEFAULT_KEEPALIVE_SETTING, MAX_PIT_KEEPALIVE_SETTING, - this::setKeepAlives, + this::setPitKeepAlives, this::validatePitKeepAlives ); @@ -364,6 +364,9 @@ private void validateKeepAlives(TimeValue defaultKeepAlive, TimeValue maxKeepAli } } + /** + * Default keep alive search setting should be less than max PIT keep alive + */ private void validatePitKeepAlives(TimeValue defaultKeepAlive, TimeValue maxPitKeepAlive) { if (defaultKeepAlive.millis() > maxPitKeepAlive.millis()) { throw new IllegalArgumentException( @@ -864,6 +867,7 @@ public void createPitReaderContext(ShardId shardId, TimeValue keepAlive, ActionL ReaderContext readerContext = null; boolean success = false; try { + // use this when reader context is freed decreasePitContexts = openPitContexts::decrementAndGet; if (openPitContexts.incrementAndGet() > maxOpenPitContext) { throw new OpenSearchRejectedExecutionException( @@ -890,6 +894,7 @@ public void createPitReaderContext(ShardId shardId, TimeValue keepAlive, ActionL searchOperationListener.onFreeReaderContext(finalReaderContext); searchOperationListener.onFreePitContext(finalReaderContext); }); + // add the newly created pit reader context to active readers putReaderContext(readerContext); readerContext = null; listener.onResponse(finalReaderContext.id()); @@ -1089,6 +1094,9 @@ private void checkKeepAliveLimit(long keepAlive) { } } + /** + * check if request keep alive is greater than max keep alive + */ private void checkPitKeepAliveLimit(long keepAlive) { if (keepAlive > maxPitKeepAlive) { throw new 
IllegalArgumentException( diff --git a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java index 2f24c9d86db61..88e3a5357ddec 100644 --- a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java @@ -21,9 +21,12 @@ import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; +import java.io.IOException; import java.util.concurrent.ExecutionException; +import static org.hamcrest.Matchers.containsString; import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) public class PitMultiNodeTests extends OpenSearchIntegTestCase { @@ -53,7 +56,7 @@ public void testPit() throws Exception { assertEquals(2, searchResponse.getTotalShards()); } - public void testCreatePitWhileNodeDropWithPartialCreationFalse() throws Exception { + public void testCreatePitWhileNodeDropWithAllowPartialCreationFalse() throws Exception { CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), false); request.setIndices(new String[] { "index" }); internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { @@ -79,7 +82,7 @@ public void testCreatePitWithAllNodesDown() throws Exception { assertTrue(ex.getMessage().contains("all shards failed")); } - public void testCreatePitWhileNodeDropWithAllowPartialFailuresTrue() throws Exception { + public void testCreatePitWhileNodeDropWithAllowPartialCreationTrue() throws Exception { CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); request.setIndices(new String[] { "index" }); internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { @@ -119,7 +122,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { }); } - public void testPitSearchWithNodeDropWithPartialSearchTrue() throws Exception { + public void testPitSearchWithNodeDropWithPartialSearchResultsFalse() throws Exception { CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); request.setIndices(new String[] { "index" }); ActionFuture execute = client().execute(CreatePITAction.INSTANCE, request); @@ -155,4 +158,78 @@ public void testPitSearchWithAllNodesDown() throws Exception { ExecutionException ex = expectThrows(ExecutionException.class, searchExecute::get); assertTrue(ex.getMessage().contains("all shards failed")); } + + public void testPitInvalidDefaultKeepAlive() { + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("pit.max_keep_alive", "1m").put("search.default_keep_alive", "2m")) + .get() + ); + assertThat(exc.getMessage(), containsString("was (2m > 1m)")); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "5m").put("pit.max_keep_alive", "5m")) + .get() + ); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "2m")) + .get() + ); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + 
.setPersistentSettings(Settings.builder().put("pit.max_keep_alive", "2m")) + .get() + ); + + exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "3m")) + .get() + ); + assertThat(exc.getMessage(), containsString("was (3m > 2m)")); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "1m")) + .get() + ); + + exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("pit.max_keep_alive", "30s")) + .get() + ); + assertThat(exc.getMessage(), containsString("was (1m > 30s)")); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull("*")) + .setTransientSettings(Settings.builder().putNull("*")) + ); + + } + } diff --git a/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java b/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java index abeda0d5062ad..18f4c116dee8f 100644 --- a/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java +++ b/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java @@ -8,23 +8,31 @@ package org.opensearch.search; +import org.hamcrest.Matchers; import org.opensearch.action.ActionFuture; import org.opensearch.action.search.CreatePITAction; import org.opensearch.action.search.CreatePITRequest; import org.opensearch.action.search.CreatePITResponse; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexNotFoundException; import org.opensearch.search.builder.PointInTimeBuilder; +import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.equalTo; import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.query.QueryBuilders.matchAllQuery; +import static org.opensearch.index.query.QueryBuilders.queryStringQuery; +import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; public class PitSingleNodeTests extends OpenSearchSingleNodeTestCase { @@ -157,6 +165,19 @@ public void testPitSearchOnDeletedIndex() throws ExecutionException, Interrupted service.doClose(); } + public void testClearIllegalPitId() { + createIndex("idx"); + String id = "c2Nhbjs2OzM0NDg1ODpzRlBLc0FXNlNyNm5JWUc1"; + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("index") + .setSize(2) + .setPointInTime(new PointInTimeBuilder(id).setKeepAlive(TimeValue.timeValueDays(1))) + .get() + ); + assertEquals("Cannot parse pit id", e.getMessage()); + } + public void testPitSearchOnCloseIndex() throws ExecutionException, InterruptedException { createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); 
client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); @@ -341,4 +362,172 @@ public void testOpenPitContextsConcurrently() throws Exception { service.doClose(); } + public void testPitAfterUpdateIndex() throws Exception { + client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 5)).get(); + client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); + + for (int i = 0; i < 50; i++) { + client().prepareIndex("test") + .setId(Integer.toString(i)) + .setSource( + jsonBuilder().startObject() + .field("user", "foobar") + .field("postDate", System.currentTimeMillis()) + .field("message", "test") + .endObject() + ) + .get(); + } + client().admin().indices().prepareRefresh().get(); + + // create pit + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueMinutes(2), true); + request.setIndices(new String[] { "test" }); + ActionFuture execute = client().execute(CreatePITAction.INSTANCE, request); + CreatePITResponse pitResponse = execute.get(); + SearchService service = getInstanceFromNode(SearchService.class); + + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(matchAllQuery()) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(50L) + ); + + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "test")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(50L) + ); + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "test")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(50L) + ); + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "update")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(0L) + ); + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "update")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(0L) + ); + + // update index + SearchResponse searchResponse = client().prepareSearch() + .setQuery(queryStringQuery("user:foobar")) + .setSize(50) + .addSort("postDate", SortOrder.ASC) + .get(); + try { + do { + for (SearchHit searchHit : searchResponse.getHits().getHits()) { + Map map = searchHit.getSourceAsMap(); + map.put("message", "update"); + client().prepareIndex("test").setId(searchHit.getId()).setSource(map).get(); + } + searchResponse = client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get(); + + } while (searchResponse.getHits().getHits().length > 0); + + client().admin().indices().prepareRefresh().get(); + assertThat( + client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, + Matchers.equalTo(50L) + ); + /** + * assert without point in time + */ + + assertThat( + client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, + Matchers.equalTo(0L) + ); + assertThat( + client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, + Matchers.equalTo(0L) + ); + assertThat( + 
client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, + Matchers.equalTo(50L) + ); + assertThat( + client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, + Matchers.equalTo(50L) + ); + /** + * using point in time id will have the old search results before update + */ + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "test")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(50L) + ); + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "test")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(50L) + ); + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "update")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(0L) + ); + assertThat( + client().prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(0) + .setQuery(termQuery("message", "update")) + .get() + .getHits() + .getTotalHits().value, + Matchers.equalTo(0L) + ); + } finally { + service.doClose(); + assertEquals(0, service.getActiveContexts()); + } + } + } From 30cb6c061b317266b7ac72da4017287c7ec82052 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Wed, 13 Apr 2022 13:54:20 +0530 Subject: [PATCH 02/19] Segregating create pit logic into separate controller Signed-off-by: Bharathwaj G --- .../action/search/CreatePITController.java | 221 ++++++++++ .../search/TransportCreatePITAction.java | 168 +------- .../search/CreatePitControllerTests.java | 402 ++++++++++++++++++ 3 files changed, 631 insertions(+), 160 deletions(-) create mode 100644 server/src/main/java/org/opensearch/action/search/CreatePITController.java create mode 100644 server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java diff --git a/server/src/main/java/org/opensearch/action/search/CreatePITController.java b/server/src/main/java/org/opensearch/action/search/CreatePITController.java new file mode 100644 index 0000000000000..4e32b1ad1752e --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/CreatePITController.java @@ -0,0 +1,221 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.OpenSearchException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.GroupedActionListener; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.index.shard.ShardId; +import org.opensearch.search.SearchPhaseResult; +import org.opensearch.search.SearchService; +import org.opensearch.search.SearchShardTarget; +import org.opensearch.tasks.Task; +import org.opensearch.transport.Transport; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +/** + * Controller for creating PIT reader context + * Phase 1 of create PIT request : Create PIT reader contexts in the associated shards with a temporary keep alive + * Phase 2 of create PIT : Update PIT reader context with PIT ID and keep alive from request and + * fail user request if any of the updates in this phase are failed + */ +public class CreatePITController implements Runnable { + private final Runnable runner; + private final SearchTransportService searchTransportService; + private final ClusterService clusterService; + private final TransportSearchAction transportSearchAction; + private final NamedWriteableRegistry namedWriteableRegistry; + private final Task task; + private final ActionListener listener; + private final CreatePITRequest request; + + public CreatePITController( + CreatePITRequest request, + SearchTransportService searchTransportService, + ClusterService clusterService, + TransportSearchAction transportSearchAction, + NamedWriteableRegistry namedWriteableRegistry, + Task task, + ActionListener listener + ) { + this.searchTransportService = searchTransportService; + this.clusterService = clusterService; + this.transportSearchAction = transportSearchAction; + this.namedWriteableRegistry = namedWriteableRegistry; + this.task = task; + this.listener = listener; + this.request = request; + runner = this::executeCreatePit; + } + + private TimeValue getCreatePitTemporaryKeepAlive() { + return SearchService.CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.get(clusterService.getSettings()); + } + + public void executeCreatePit() { + SearchRequest searchRequest = new SearchRequest(request.getIndices()); + searchRequest.preference(request.getPreference()); + searchRequest.routing(request.getRouting()); + searchRequest.indicesOptions(request.getIndicesOptions()); + searchRequest.allowPartialSearchResults(request.shouldAllowPartialPitCreation()); + + SearchTask searchTask = new SearchTask( + task.getId(), + task.getType(), + task.getAction(), + () -> task.getDescription(), + task.getParentTaskId(), + task.getHeaders() + ); + + final StepListener createPitListener = new StepListener<>(); + + final ActionListener updatePitIdListener = ActionListener.wrap(r -> listener.onResponse(r), listener::onFailure); + /** + * Phase 1 of create PIT + */ + executeCreatePit(searchTask, searchRequest, createPitListener); + + /** + * Phase 2 of create PIT where we update pit id in pit contexts + */ + executeUpdatePitId(request, createPitListener, updatePitIdListener); + } + + /** + * Creates PIT reader context with temporary keep alive + */ + public 
void executeCreatePit(Task task, SearchRequest searchRequest, StepListener createPitListener) { + transportSearchAction.executeRequest( + task, + searchRequest, + TransportCreatePITAction.CREATE_PIT_ACTION, + true, + new TransportSearchAction.SinglePhaseSearchAction() { + @Override + public void executeOnShardTarget( + SearchTask searchTask, + SearchShardTarget target, + Transport.Connection connection, + ActionListener searchPhaseResultActionListener + ) { + searchTransportService.createPitContext( + connection, + new TransportCreatePITAction.CreateReaderContextRequest(target.getShardId(), getCreatePitTemporaryKeepAlive()), + searchTask, + ActionListener.wrap(r -> searchPhaseResultActionListener.onResponse(r), searchPhaseResultActionListener::onFailure) + ); + } + }, + createPitListener + ); + } + + /** + * Updates PIT ID, keep alive and createdTime of PIT reader context + */ + public void executeUpdatePitId( + CreatePITRequest request, + StepListener createPitListener, + ActionListener updatePitIdListener + ) { + createPitListener.whenComplete(createPITResponse -> { + SearchContextId contextId = SearchContextId.decode(namedWriteableRegistry, createPITResponse.getId()); + final StepListener> lookupListener = getConnectionLookupListener(contextId); + lookupListener.whenComplete(nodelookup -> { + final ActionListener groupedActionListener = getGroupedListener( + updatePitIdListener, + createPITResponse, + contextId.shards().size() + ); + /** + * store the create time ( same create time for all PIT contexts across shards ) to be used + * for list PIT api + */ + long createTime = System.currentTimeMillis(); + for (Map.Entry entry : contextId.shards().entrySet()) { + DiscoveryNode node = nodelookup.apply(entry.getValue().getClusterAlias(), entry.getValue().getNode()); + try { + final Transport.Connection connection = searchTransportService.getConnection( + entry.getValue().getClusterAlias(), + node + ); + searchTransportService.updatePitContext( + connection, + new UpdatePITContextRequest( + entry.getValue().getSearchContextId(), + createPITResponse.getId(), + request.getKeepAlive().millis(), + createTime + ), + groupedActionListener + ); + } catch (Exception e) { + groupedActionListener.onFailure(new OpenSearchException("Create pit failed on node[" + node + "]", e)); + } + } + }, updatePitIdListener::onFailure); + }, updatePitIdListener::onFailure); + } + + private StepListener> getConnectionLookupListener(SearchContextId contextId) { + ClusterState state = clusterService.state(); + + final Set clusters = contextId.shards() + .values() + .stream() + .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) + .map(SearchContextIdForNode::getClusterAlias) + .collect(Collectors.toSet()); + + final StepListener> lookupListener = new StepListener<>(); + + if (clusters.isEmpty() == false) { + searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); + } else { + lookupListener.onResponse((cluster, nodeId) -> state.getNodes().get(nodeId)); + } + return lookupListener; + } + + private ActionListener getGroupedListener( + ActionListener updatePitIdListener, + CreatePITResponse createPITResponse, + int size + ) { + return new GroupedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(final Collection responses) { + updatePitIdListener.onResponse(createPITResponse); + } + + @Override + public void onFailure(final Exception e) { + updatePitIdListener.onFailure(e); + } + }, size); + } + + @Override + public void run() { + runner.run(); + } +} 
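
[Illustrative note, not part of the patch] A minimal sketch of how a caller could exercise the two-phase create-PIT flow implemented by the controller above, using only classes this patch series introduces (CreatePITRequest, CreatePITAction, CreatePITResponse) plus the existing PointInTimeBuilder already used in the tests. The index name, keep-alive values, and the surrounding `client`/`logger` references are assumptions for the example, not part of the change:

    // Sketch only: assumes a Client `client` and a Logger `logger` are in scope.
    // Imports assumed: org.opensearch.action.ActionListener, org.opensearch.action.search.*,
    // org.opensearch.common.unit.TimeValue, org.opensearch.index.query.QueryBuilders,
    // org.opensearch.search.builder.PointInTimeBuilder
    CreatePITRequest createPitRequest = new CreatePITRequest(TimeValue.timeValueMinutes(10), true);
    createPitRequest.setIndices(new String[] { "my-index" });
    client.execute(CreatePITAction.INSTANCE, createPitRequest, ActionListener.wrap(createPitResponse -> {
        // Phase 1 opened per-shard reader contexts with the temporary keep alive; phase 2
        // stamped the final PIT id and the request keep alive into them. The returned id
        // can now be passed to searches until the keep alive expires.
        client.prepareSearch()
            .setPointInTime(new PointInTimeBuilder(createPitResponse.getId()).setKeepAlive(TimeValue.timeValueMinutes(10)))
            .setQuery(QueryBuilders.matchAllQuery())
            .execute(ActionListener.wrap(
                searchResponse -> logger.info("PIT search hits [{}]", searchResponse.getHits().getTotalHits()),
                e -> logger.error("PIT search failed", e)
            ));
    }, e -> logger.error("create PIT failed", e)));
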
diff --git a/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java b/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java index 9d81951de411c..225973211c406 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportCreatePITAction.java @@ -8,16 +8,10 @@ package org.opensearch.action.search; -import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; -import org.opensearch.action.StepListener; import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.GroupedActionListener; import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.io.stream.StreamInput; @@ -25,26 +19,15 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.shard.ShardId; import org.opensearch.search.SearchPhaseResult; -import org.opensearch.search.SearchService; -import org.opensearch.search.SearchShardTarget; import org.opensearch.search.internal.ShardSearchContextId; import org.opensearch.tasks.Task; -import org.opensearch.transport.Transport; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportService; import java.io.IOException; -import java.util.Collection; -import java.util.Map; -import java.util.Set; -import java.util.function.BiFunction; -import java.util.stream.Collectors; /** * Transport action for creating PIT reader context - * Phase 1 of create PIT request : Create PIT reader contexts in the associated shards with a temporary keep alive - * Phase 2 of create PIT : Update PIT reader context with PIT ID and keep alive from request and - * fail user request if any of the updates in this phase are failed */ public class TransportCreatePITAction extends HandledTransportAction { @@ -72,153 +55,18 @@ public TransportCreatePITAction( this.namedWriteableRegistry = namedWriteableRegistry; } - public TimeValue getCreatePitTemporaryKeepAlive() { - return SearchService.CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.get(clusterService.getSettings()); - } - @Override protected void doExecute(Task task, CreatePITRequest request, ActionListener listener) { - SearchRequest searchRequest = new SearchRequest(request.getIndices()); - searchRequest.preference(request.getPreference()); - searchRequest.routing(request.getRouting()); - searchRequest.indicesOptions(request.getIndicesOptions()); - searchRequest.allowPartialSearchResults(request.shouldAllowPartialPitCreation()); - - SearchTask searchTask = new SearchTask( - task.getId(), - task.getType(), - task.getAction(), - () -> task.getDescription(), - task.getParentTaskId(), - task.getHeaders() - ); - - final StepListener createPitListener = new StepListener<>(); - - final ActionListener updatePitIdListener = ActionListener.wrap(r -> listener.onResponse(r), listener::onFailure); - /** - * Phase 1 of create PIT - */ - executeCreatePit(searchTask, searchRequest, createPitListener); - - /** - * Phase 2 of create PIT where we update pit id in pit contexts - */ - executeUpdatePitId(request, createPitListener, updatePitIdListener); - } - - /** - * Creates PIT reader context with temporary keep alive - */ - 
public void executeCreatePit(Task task, SearchRequest searchRequest, StepListener createPitListener) { - transportSearchAction.executeRequest( + Runnable runnable = new CreatePITController( + request, + searchTransportService, + clusterService, + transportSearchAction, + namedWriteableRegistry, task, - searchRequest, - CREATE_PIT_ACTION, - true, - new TransportSearchAction.SinglePhaseSearchAction() { - @Override - public void executeOnShardTarget( - SearchTask searchTask, - SearchShardTarget target, - Transport.Connection connection, - ActionListener searchPhaseResultActionListener - ) { - searchTransportService.createPitContext( - connection, - new CreateReaderContextRequest(target.getShardId(), getCreatePitTemporaryKeepAlive()), - searchTask, - ActionListener.wrap(r -> searchPhaseResultActionListener.onResponse(r), searchPhaseResultActionListener::onFailure) - ); - } - }, - createPitListener + listener ); - } - - /** - * Updates PIT ID, keep alive and createdTime of PIT reader context - */ - public void executeUpdatePitId( - CreatePITRequest request, - StepListener createPitListener, - ActionListener updatePitIdListener - ) { - createPitListener.whenComplete(createPITResponse -> { - SearchContextId contextId = SearchContextId.decode(namedWriteableRegistry, createPITResponse.getId()); - final StepListener> lookupListener = getConnectionLookupListener(contextId); - lookupListener.whenComplete(nodelookup -> { - final ActionListener groupedActionListener = getGroupedListener( - updatePitIdListener, - createPITResponse, - contextId.shards().size() - ); - /** - * store the create time ( same create time for all PIT contexts across shards ) to be used - * for list PIT api - */ - long createTime = System.currentTimeMillis(); - for (Map.Entry entry : contextId.shards().entrySet()) { - DiscoveryNode node = nodelookup.apply(entry.getValue().getClusterAlias(), entry.getValue().getNode()); - try { - final Transport.Connection connection = searchTransportService.getConnection( - entry.getValue().getClusterAlias(), - node - ); - searchTransportService.updatePitContext( - connection, - new UpdatePITContextRequest( - entry.getValue().getSearchContextId(), - createPITResponse.getId(), - request.getKeepAlive().millis(), - createTime - ), - groupedActionListener - ); - } catch (Exception e) { - groupedActionListener.onFailure(new OpenSearchException("Create pit failed on node[" + node + "]", e)); - } - } - }, updatePitIdListener::onFailure); - }, updatePitIdListener::onFailure); - } - - private StepListener> getConnectionLookupListener(SearchContextId contextId) { - ClusterState state = clusterService.state(); - - final Set clusters = contextId.shards() - .values() - .stream() - .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) - .map(SearchContextIdForNode::getClusterAlias) - .collect(Collectors.toSet()); - - final StepListener> lookupListener = new StepListener<>(); - - if (clusters.isEmpty() == false) { - searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); - } else { - lookupListener.onResponse((cluster, nodeId) -> state.getNodes().get(nodeId)); - } - return lookupListener; - } - - private ActionListener getGroupedListener( - ActionListener updatePitIdListener, - CreatePITResponse createPITResponse, - int size - ) { - return new GroupedActionListener<>(new ActionListener<>() { - @Override - public void onResponse(final Collection responses) { - updatePitIdListener.onResponse(createPITResponse); - } - - @Override - public void onFailure(final Exception e) 
{ - updatePitIdListener.onFailure(e); - } - }, size); + runnable.run(); } public static class CreateReaderContextRequest extends TransportRequest { diff --git a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java new file mode 100644 index 0000000000000..c44d8c651cce6 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java @@ -0,0 +1,402 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.opensearch.Version; +import org.opensearch.action.ActionListener; +import org.opensearch.action.LatchedActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.AtomicArray; +import org.opensearch.index.query.IdsQueryBuilder; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.shard.ShardId; +import org.opensearch.search.*; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.internal.AliasFilter; +import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskId; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.transport.Transport; +import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CreatePitControllerTests extends OpenSearchTestCase { + + DiscoveryNode node1 = null; + DiscoveryNode node2 = null; + DiscoveryNode node3 = null; + String pitId = null; + TransportSearchAction transportSearchAction = null; + Task task = null; + DiscoveryNodes nodes = null; + NamedWriteableRegistry namedWriteableRegistry = null; + SearchResponse searchResponse = null; + ActionListener createPitListener = null; + ClusterService clusterServiceMock = null; + + @Before + public void setupData() { + node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + setPitId(); + namedWriteableRegistry = new NamedWriteableRegistry( + Arrays.asList( + new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, MatchAllQueryBuilder.NAME, MatchAllQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, IdsQueryBuilder.NAME, IdsQueryBuilder::new) + ) + ); + nodes = 
DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); + transportSearchAction = mock(TransportSearchAction.class); + task = new Task( + randomLong(), + "transport", + SearchAction.NAME, + "description", + new TaskId(randomLong() + ":" + randomLong()), + Collections.emptyMap() + ); + InternalSearchResponse response = new InternalSearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), + InternalAggregations.EMPTY, + null, + null, + false, + null, + 1 + ); + searchResponse = new SearchResponse( + response, + null, + 3, + 3, + 0, + 100, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY, + pitId + ); + createPitListener = new ActionListener() { + @Override + public void onResponse(CreatePITResponse createPITResponse) { + assertEquals(3, createPITResponse.getTotalShards()); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + + clusterServiceMock = mock(ClusterService.class); + ClusterState state = mock(ClusterState.class); + + final Settings keepAliveSettings = Settings.builder() + .put(SearchService.CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.getKey(), 30000) + .build(); + when(clusterServiceMock.getSettings()).thenReturn(keepAliveSettings); + + when(state.getMetadata()).thenReturn(Metadata.EMPTY_METADATA); + when(state.metadata()).thenReturn(Metadata.EMPTY_METADATA); + when(clusterServiceMock.state()).thenReturn(state); + when(state.getNodes()).thenReturn(nodes); + } + + public void testUpdatePitAfterCreatePitSuccess() throws InterruptedException { + List updateNodesInvoked = new CopyOnWriteArrayList<>(); + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + @Override + public void updatePitContext( + Transport.Connection connection, + UpdatePITContextRequest request, + ActionListener listener + ) { + updateNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new UpdatePitContextResponse("pitid", 500000, 500000))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + + CountDownLatch latch = new CountDownLatch(1); + + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + + CreatePITController controller = new CreatePITController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + CreatePITResponse createPITResponse = new CreatePITResponse(searchResponse); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePITResponse createPITResponse) { + assertEquals(3, createPITResponse.getTotalShards()); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }, latch); + + StepListener createListener = new StepListener<>(); + + controller.executeUpdatePitId(request, createListener, updatelistener); + createListener.onResponse(createPITResponse); + latch.await(); + assertEquals(3, updateNodesInvoked.size()); + } + + public void testUpdatePitAfterCreatePitFailure() throws InterruptedException { + List updateNodesInvoked = new CopyOnWriteArrayList<>(); + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + @Override + public void updatePitContext( + 
Transport.Connection connection, + UpdatePITContextRequest request, + ActionListener listener + ) { + updateNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new UpdatePitContextResponse("pitid", 500000, 500000))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + + CountDownLatch latch = new CountDownLatch(1); + + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + + CreatePITController controller = new CreatePITController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePITResponse createPITResponse) { + throw new AssertionError("on response is called"); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e.getCause().getMessage().contains("Exception occurred in phase 1")); + } + }, latch); + + StepListener createListener = new StepListener<>(); + + controller.executeUpdatePitId(request, createListener, updatelistener); + createListener.onFailure(new Exception("Exception occurred in phase 1")); + latch.await(); + assertEquals(0, updateNodesInvoked.size()); + } + + public void testUpdatePitFailureForNodeDrop() throws InterruptedException { + List updateNodesInvoked = new CopyOnWriteArrayList<>(); + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + @Override + public void updatePitContext( + Transport.Connection connection, + UpdatePITContextRequest request, + ActionListener listener + ) { + + updateNodesInvoked.add(connection.getNode()); + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new UpdatePitContextResponse("pitid", 500000, 500000))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + CreatePITController controller = new CreatePITController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + CreatePITResponse createPITResponse = new CreatePITResponse(searchResponse); + CountDownLatch latch = new CountDownLatch(1); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePITResponse createPITResponse) { + throw new AssertionError("response is called"); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e.getMessage().contains("node 3 down")); + } + }, latch); + + StepListener createListener = new StepListener<>(); + controller.executeUpdatePitId(request, createListener, updatelistener); + createListener.onResponse(createPITResponse); + latch.await(); + assertEquals(3, updateNodesInvoked.size()); + } + + public void testUpdatePitFailureWhereAllNodesDown() throws InterruptedException { + List updateNodesInvoked = new CopyOnWriteArrayList<>(); + 
SearchTransportService searchTransportService = new SearchTransportService(null, null) { + @Override + public void updatePitContext( + Transport.Connection connection, + UpdatePITContextRequest request, + ActionListener listener + ) { + updateNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onFailure(new Exception("node down"))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + CreatePITController controller = new CreatePITController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + CreatePITResponse createPITResponse = new CreatePITResponse(searchResponse); + CountDownLatch latch = new CountDownLatch(1); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePITResponse createPITResponse) { + throw new AssertionError("response is called"); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e.getMessage().contains("node down")); + } + }, latch); + + StepListener createListener = new StepListener<>(); + controller.executeUpdatePitId(request, createListener, updatelistener); + createListener.onResponse(createPITResponse); + latch.await(); + assertEquals(3, updateNodesInvoked.size()); + } + + QueryBuilder randomQueryBuilder() { + if (randomBoolean()) { + return new TermQueryBuilder(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } else if (randomBoolean()) { + return new MatchAllQueryBuilder(); + } else { + return new IdsQueryBuilder().addIds(randomAlphaOfLength(10)); + } + } + + private void setPitId() { + AtomicArray array = new AtomicArray<>(3); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("a", 1), + node1 + ); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("b", 12), + node2 + ); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("c", 42), + node3 + ); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + array.setOnce(0, testSearchPhaseResult1); + array.setOnce(1, testSearchPhaseResult2); + array.setOnce(2, testSearchPhaseResult3); + + final Version version = Version.CURRENT; + final Map aliasFilters = new HashMap<>(); + for (SearchPhaseResult result : array.asList()) { + final AliasFilter aliasFilter; + if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder()); + } else if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder(), "alias-" + between(1, 10)); + } else { + aliasFilter = AliasFilter.EMPTY; + } + if (randomBoolean()) { + aliasFilters.put(result.getSearchShardTarget().getShardId().getIndex().getUUID(), aliasFilter); + } + } + pitId = 
SearchContextId.encode(array.asList(), aliasFilters, version); + } + +} From 57232fbfa179bd8ae6c07f2ddafeb43d4e397c68 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Wed, 13 Apr 2022 18:48:12 +0530 Subject: [PATCH 03/19] Delete PIT API Signed-off-by: Bharathwaj G --- .../org/opensearch/action/ActionModule.java | 12 +- .../action/search/DeletePITAction.java | 21 +++ .../action/search/DeletePITController.java | 126 +++++++++++++ .../action/search/DeletePITRequest.java | 132 ++++++++++++++ .../action/search/DeletePITResponse.java | 80 +++++++++ .../action/search/SearchTransportService.java | 53 ++++++ .../search/TransportDeletePITAction.java | 155 ++++++++++++++++ .../java/org/opensearch/client/Client.java | 16 +- .../client/support/AbstractClient.java | 24 +-- .../action/search/RestDeletePITAction.java | 55 ++++++ .../org/opensearch/search/SearchService.java | 27 +++ .../search/DeletePitMultiNodeTests.java | 167 ++++++++++++++++++ .../opensearch/search/PitMultiNodeTests.java | 1 - .../search/pit/RestDeletePitActionTests.java | 83 +++++++++ 14 files changed, 913 insertions(+), 39 deletions(-) create mode 100644 server/src/main/java/org/opensearch/action/search/DeletePITAction.java create mode 100644 server/src/main/java/org/opensearch/action/search/DeletePITController.java create mode 100644 server/src/main/java/org/opensearch/action/search/DeletePITRequest.java create mode 100644 server/src/main/java/org/opensearch/action/search/DeletePITResponse.java create mode 100644 server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java create mode 100644 server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java create mode 100644 server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java create mode 100644 server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java index 481b48913e389..67f5a3afb1a1b 100644 --- a/server/src/main/java/org/opensearch/action/ActionModule.java +++ b/server/src/main/java/org/opensearch/action/ActionModule.java @@ -237,11 +237,13 @@ import org.opensearch.action.main.TransportMainAction; import org.opensearch.action.search.ClearScrollAction; import org.opensearch.action.search.CreatePITAction; +import org.opensearch.action.search.DeletePITAction; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchScrollAction; import org.opensearch.action.search.TransportClearScrollAction; import org.opensearch.action.search.TransportCreatePITAction; +import org.opensearch.action.search.TransportDeletePITAction; import org.opensearch.action.search.TransportMultiSearchAction; import org.opensearch.action.search.TransportSearchAction; import org.opensearch.action.search.TransportSearchScrollAction; @@ -400,13 +402,7 @@ import org.opensearch.rest.action.ingest.RestGetPipelineAction; import org.opensearch.rest.action.ingest.RestPutPipelineAction; import org.opensearch.rest.action.ingest.RestSimulatePipelineAction; -import org.opensearch.rest.action.search.RestClearScrollAction; -import org.opensearch.rest.action.search.RestCountAction; -import org.opensearch.rest.action.search.RestCreatePITAction; -import org.opensearch.rest.action.search.RestExplainAction; -import org.opensearch.rest.action.search.RestMultiSearchAction; -import org.opensearch.rest.action.search.RestSearchAction; -import 
org.opensearch.rest.action.search.RestSearchScrollAction; +import org.opensearch.rest.action.search.*; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.usage.UsageService; @@ -664,6 +660,7 @@ public void reg actions.register(DeleteDanglingIndexAction.INSTANCE, TransportDeleteDanglingIndexAction.class); actions.register(FindDanglingIndexAction.INSTANCE, TransportFindDanglingIndexAction.class); actions.register(CreatePITAction.INSTANCE, TransportCreatePITAction.class); + actions.register(DeletePITAction.INSTANCE, TransportDeletePITAction.class); return unmodifiableMap(actions.getRegistry()); } @@ -839,6 +836,7 @@ public void initRestHandlers(Supplier nodesInCluster) { // Point in time API registerHandler.accept(new RestCreatePITAction()); + registerHandler.accept(new RestDeletePITAction()); for (ActionPlugin plugin : actionPlugins) { for (RestHandler handler : plugin.getRestHandlers( settings, diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITAction.java b/server/src/main/java/org/opensearch/action/search/DeletePITAction.java new file mode 100644 index 0000000000000..7f043a365c403 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeletePITAction.java @@ -0,0 +1,21 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionType; + +public class DeletePITAction extends ActionType { + + public static final DeletePITAction INSTANCE = new DeletePITAction(); + public static final String NAME = "indices:admin/delete/pit"; + + private DeletePITAction() { + super(NAME, DeletePITResponse::new); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITController.java b/server/src/main/java/org/opensearch/action/search/DeletePITController.java new file mode 100644 index 0000000000000..88d4a71f77259 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeletePITController.java @@ -0,0 +1,126 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionListener; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.util.concurrent.CountDown; +import org.opensearch.search.SearchPhaseResult; +import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.transport.Transport; +import org.opensearch.transport.TransportResponse; + +import java.util.*; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +public class DeletePITController implements Runnable { + private final DiscoveryNodes nodes; + private final SearchTransportService searchTransportService; + private final CountDown expectedOps; + private final ActionListener listener; + private final AtomicBoolean hasFailed = new AtomicBoolean(false); + private final AtomicInteger freedSearchContexts = new AtomicInteger(0); + private final ClusterService clusterService; + private final Runnable runner; + + public DeletePITController( + DeletePITRequest request, + ActionListener listener, + ClusterService clusterService, + SearchTransportService searchTransportService + ) { + this.nodes = clusterService.state().getNodes(); + this.clusterService = clusterService; + this.searchTransportService = searchTransportService; + this.listener = listener; + List pitIds = request.getPitIds(); + final int expectedOps; + if (pitIds.size() == 1 && "_all".equals(pitIds.get(0))) { + expectedOps = nodes.getSize(); + runner = this::deleteAllPits; + } else { + // TODO: replace this with #closeContexts + List contexts = new ArrayList<>(); + for (String scrollId : request.getPitIds()) { + SearchContextIdForNode[] context = TransportSearchHelper.parseScrollId(scrollId).getContext(); + Collections.addAll(contexts, context); + } + if (contexts.isEmpty()) { + expectedOps = 0; + runner = () -> listener.onResponse(new DeletePITResponse(true)); + } else { + expectedOps = contexts.size(); + runner = () -> ClearScrollController.closeContexts( + clusterService.state().nodes(), + searchTransportService, + contexts, + ActionListener.wrap(r -> listener.onResponse(new DeletePITResponse(true)), listener::onFailure) + ); + } + } + this.expectedOps = new CountDown(expectedOps); + + } + + @Override + public void run() { + runner.run(); + } + + void deleteAllPits() { + for (final DiscoveryNode node : clusterService.state().getNodes()) { + try { + Transport.Connection connection = searchTransportService.getConnection(null, node); + searchTransportService.sendDeleteAllPitContexts(connection, new ActionListener() { + @Override + public void onResponse(TransportResponse response) { + onFreedContext(true); + } + + @Override + public void onFailure(Exception e) { + onFailedFreedContext(e, node); + } + }); + } catch (Exception e) { + onFailedFreedContext(e, node); + } + } + } + + public static class PITSinglePhaseSearchResult extends SearchPhaseResult { + public void setContextId(ShardSearchContextId contextId) { + this.contextId = contextId; + } + } + + private void onFreedContext(boolean freed) { + if (freed) { + freedSearchContexts.incrementAndGet(); + } + if (expectedOps.countDown()) { + boolean succeeded = hasFailed.get() == false; + listener.onResponse(new DeletePITResponse(succeeded)); + } + } + + private void onFailedFreedContext(Throwable e, DiscoveryNode node) { + /* + * We have to set the failure marker before we count down otherwise we can 
expose the failure marker before we have set it to a + * racing thread successfully freeing a context. This would lead to that thread responding that the clear scroll succeeded. + */ + hasFailed.set(true); + if (expectedOps.countDown()) { + listener.onResponse(new DeletePITResponse(false)); + } + } +} diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java b/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java new file mode 100644 index 0000000000000..2bd8e9681318b --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java @@ -0,0 +1,132 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.ToXContent; +import org.opensearch.common.xcontent.ToXContentObject; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.opensearch.action.ValidateActions.addValidationError; + +/** + * Request to delete one or more PIT contexts based on IDs. + */ +public class DeletePITRequest extends ActionRequest implements ToXContentObject { + + private List pitIds; + + public DeletePITRequest(StreamInput in) throws IOException { + super(in); + pitIds = Arrays.asList(in.readStringArray()); + } + + public DeletePITRequest(String... 
pitIds) { + if (pitIds != null) { + this.pitIds = Arrays.asList(pitIds); + } + } + + public DeletePITRequest(List pitIds) { + if (pitIds != null) { + this.pitIds = pitIds; + } + } + + public DeletePITRequest() {} + + public List getPitIds() { + return pitIds; + } + + public void setPitIds(List pitIds) { + this.pitIds = pitIds; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (pitIds == null || pitIds.isEmpty()) { + validationException = addValidationError("no pit ids specified", validationException); + } + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (pitIds == null) { + out.writeVInt(0); + } else { + out.writeStringArray(pitIds.toArray(new String[pitIds.size()])); + } + } + + public void addPitId(String pitId) { + if (pitIds == null) { + pitIds = new ArrayList<>(); + } + pitIds.add(pitId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.startArray("pit_id"); + for (String pitId : pitIds) { + builder.value(pitId); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + public void fromXContent(XContentParser parser) throws IOException { + pitIds = null; + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException("Malformed content, must start with an object"); + } else { + XContentParser.Token token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if ("pit_id".equals(currentFieldName)) { + if (token == XContentParser.Token.START_ARRAY) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token.isValue() == false) { + throw new IllegalArgumentException("pit_id array element should only contain pit_id"); + } + addPitId(parser.text()); + } + } else { + if (token.isValue() == false) { + throw new IllegalArgumentException("pit_id element should only contain pit_id"); + } + addPitId(parser.text()); + } + } else { + throw new IllegalArgumentException( + "Unknown parameter [" + currentFieldName + "] in request body or parameter is of the wrong type[" + token + "] " + ); + } + } + } + } + +} diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java b/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java new file mode 100644 index 0000000000000..388ca99b42402 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
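A quick sketch of the body shape that `fromXContent` above accepts and that `toXContent` produces; the builder boilerplate is illustrative only and assumes the caller handles `IOException`:

    // Accepted body: {"pit_id": ["id1", "id2"]}; a single string value for "pit_id" is also parsed.
    DeletePITRequest request = new DeletePITRequest();
    request.addPitId("id1");
    request.addPitId("id2");
    try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
        request.toXContent(builder, ToXContent.EMPTY_PARAMS);
        // builder now holds {"pit_id":["id1","id2"]}
    }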
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionResponse; +import org.opensearch.common.ParseField; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.*; +import org.opensearch.rest.RestStatus; + +import java.io.IOException; + +import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.opensearch.rest.RestStatus.NOT_FOUND; +import static org.opensearch.rest.RestStatus.OK; + +public class DeletePITResponse extends ActionResponse implements StatusToXContentObject { + + private final boolean succeeded; + + public DeletePITResponse(boolean succeeded) { + this.succeeded = succeeded; + } + + public DeletePITResponse(StreamInput in) throws IOException { + super(in); + succeeded = in.readBoolean(); + } + + /** + * @return Whether the attempt to delete PIT was successful. + */ + public boolean isSucceeded() { + return succeeded; + } + + @Override + public RestStatus status() { + return succeeded ? OK : NOT_FOUND; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(succeeded); + } + + private static final ParseField SUCCEEDED = new ParseField("succeeded"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "delete_pit", + true, + a -> new DeletePITResponse((boolean) a[0]) + ); + static { + PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), SUCCEEDED, ObjectParser.ValueType.BOOLEAN); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(SUCCEEDED.getPreferredName(), succeeded); + builder.endObject(); + return builder; + } + + /** + * Parse the delete PIT response body into a new {@link DeletePITResponse} object + */ + public static DeletePITResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java index 46946b5873466..b8abb6f96e574 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java @@ -83,8 +83,10 @@ public class SearchTransportService { public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]"; + public static final String FREE_CONTEXT_PIT_ACTION_NAME = "indices:data/read/search[free_context/pit]"; public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]"; public static final String CLEAR_SCROLL_CONTEXTS_ACTION_NAME = "indices:data/read/search[clear_scroll_contexts]"; + public static final String DELETE_ALL_PIT_CONTEXTS_ACTION_NAME = "indices:data/read/search[delete_pit_contexts]"; public static final String DFS_ACTION_NAME = "indices:data/read/search[phase/dfs]"; public static final String QUERY_ACTION_NAME = "indices:data/read/search[phase/query]"; public static final String QUERY_ID_ACTION_NAME = "indices:data/read/search[phase/query/id]"; @@ -142,6 +144,20 @@ public void sendFreeContext( ); } + public void sendPitFreeContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + transportService.sendRequest( + 
connection, + FREE_CONTEXT_PIT_ACTION_NAME, + new ScrollFreeContextRequest(contextId), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new) + ); + } + public void updatePitContext( Transport.Connection connection, UpdatePITContextRequest request, @@ -198,6 +214,16 @@ public void sendClearAllScrollContexts(Transport.Connection connection, final Ac ); } + public void sendDeleteAllPitContexts(Transport.Connection connection, final ActionListener listener) { + transportService.sendRequest( + connection, + DELETE_ALL_PIT_CONTEXTS_ACTION_NAME, + TransportRequest.Empty.INSTANCE, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, (in) -> TransportResponse.Empty.INSTANCE) + ); + } + public void sendExecuteDfs( Transport.Connection connection, final ShardSearchRequest request, @@ -437,6 +463,18 @@ public static void registerRequestHandler(TransportService transportService, Sea } ); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, SearchFreeContextResponse::new); + + transportService.registerRequestHandler( + FREE_CONTEXT_PIT_ACTION_NAME, + ThreadPool.Names.SAME, + ScrollFreeContextRequest::new, + (request, channel, task) -> { + boolean freed = searchService.freeReaderContextIfFound(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + } + ); + TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_PIT_ACTION_NAME, SearchFreeContextResponse::new); + transportService.registerRequestHandler( FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, @@ -620,6 +658,21 @@ public static void registerRequestHandler(TransportService transportService, Sea ); TransportActionProxy.registerProxyAction(transportService, UPDATE_READER_CONTEXT_ACTION_NAME, UpdatePitContextResponse::new); + transportService.registerRequestHandler( + DELETE_ALL_PIT_CONTEXTS_ACTION_NAME, + ThreadPool.Names.SAME, + TransportRequest.Empty::new, + (request, channel, task) -> { + searchService.freeAllPitContexts(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + } + ); + TransportActionProxy.registerProxyAction( + transportService, + DELETE_ALL_PIT_CONTEXTS_ACTION_NAME, + (in) -> TransportResponse.Empty.INSTANCE + ); + } /** diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java b/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java new file mode 100644 index 0000000000000..c28ff8797a2d0 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java @@ -0,0 +1,155 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
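A minimal sketch of how the two transport hooks added above are meant to be driven from the coordinating side; `searchTransportService`, `connection`, `contextId` and `logger` are assumed to come from the calling action:

    // Free a single PIT reader context on the node that owns it.
    searchTransportService.sendPitFreeContext(connection, contextId,
        ActionListener.wrap(
            response -> logger.debug("pit context freed: {}", response.isFreed()),
            e -> logger.debug("freeing pit context failed", e)));

    // Drop every PIT reader context held by that node.
    searchTransportService.sendDeleteAllPitContexts(connection,
        ActionListener.wrap(
            response -> logger.debug("all pit contexts on node freed"),
            e -> logger.debug("delete all pit contexts failed", e)));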
+ */ + +package org.opensearch.action.search; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.GroupedActionListener; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Strings; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.search.SearchService; +import org.opensearch.tasks.Task; +import org.opensearch.transport.Transport; +import org.opensearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +public class TransportDeletePITAction extends HandledTransportAction { + private SearchService searchService; + private final NamedWriteableRegistry namedWriteableRegistry; + private TransportSearchAction transportSearchAction; + private final ClusterService clusterService; + private final SearchTransportService searchTransportService; + private static final Logger logger = LogManager.getLogger(TransportDeletePITAction.class); + + @Inject + public TransportDeletePITAction( + SearchService searchService, + TransportService transportService, + ActionFilters actionFilters, + NamedWriteableRegistry namedWriteableRegistry, + TransportSearchAction transportSearchAction, + ClusterService clusterService, + SearchTransportService searchTransportService + ) { + super(DeletePITAction.NAME, transportService, actionFilters, DeletePITRequest::new); + this.searchService = searchService; + this.namedWriteableRegistry = namedWriteableRegistry; + this.transportSearchAction = transportSearchAction; + this.clusterService = clusterService; + this.searchTransportService = searchTransportService; + } + + @Override + protected void doExecute(Task task, DeletePITRequest request, ActionListener listener) { + List contexts = new ArrayList<>(); + List pitIds = request.getPitIds(); + if (pitIds.size() == 1 && "_all".equals(pitIds.get(0))) { + deleteAllPits(listener); + } else { + for (String pitId : request.getPitIds()) { + SearchContextId contextId = SearchContextId.decode(namedWriteableRegistry, pitId); + contexts.addAll(contextId.shards().values()); + } + deletePits(contexts, ActionListener.wrap(r -> { + if (r == contexts.size()) { + listener.onResponse(new DeletePITResponse(true)); + } else { + listener.onResponse(new DeletePITResponse(false)); + } + }, e -> { + logger.debug("Delete PIT failed ", e); + listener.onResponse(new DeletePITResponse(false)); + })); + } + } + + void deleteAllPits(ActionListener listener) { + int size = clusterService.state().getNodes().getSize(); + ActionListener groupedActionListener = getGroupedListener(listener, size); + for (final DiscoveryNode node : clusterService.state().getNodes()) { + try { + Transport.Connection connection = searchTransportService.getConnection(null, node); + searchTransportService.sendDeleteAllPitContexts(connection, groupedActionListener); + } catch (Exception e) { + groupedActionListener.onFailure(e); + } + } + } + + void deletePits(List contexts, ActionListener listener) { + final StepListener> lookupListener = 
getLookupListener(contexts); + lookupListener.whenComplete(nodeLookup -> { + final GroupedActionListener groupedListener = new GroupedActionListener<>( + ActionListener.delegateFailure(listener, (l, rs) -> l.onResponse(Math.toIntExact(rs.stream().filter(r -> r).count()))), + contexts.size() + ); + + for (SearchContextIdForNode contextId : contexts) { + final DiscoveryNode node = nodeLookup.apply(contextId.getClusterAlias(), contextId.getNode()); + if (node == null) { + groupedListener.onFailure(new OpenSearchException("node not connected")); + } else { + try { + final Transport.Connection connection = searchTransportService.getConnection(contextId.getClusterAlias(), node); + searchTransportService.sendPitFreeContext( + connection, + contextId.getSearchContextId(), + ActionListener.wrap(r -> groupedListener.onResponse(r.isFreed()), e -> groupedListener.onResponse(false)) + ); + } catch (Exception e) { + groupedListener.onResponse(false); + } + } + } + }, listener::onFailure); + } + + private StepListener> getLookupListener(List contexts) { + final StepListener> lookupListener = new StepListener<>(); + final Set clusters = contexts.stream() + .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) + .map(SearchContextIdForNode::getClusterAlias) + .collect(Collectors.toSet()); + if (clusters.isEmpty() == false) { + searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); + } else { + lookupListener.onResponse((cluster, nodeId) -> clusterService.state().getNodes().get(nodeId)); + } + return lookupListener; + } + + private ActionListener getGroupedListener(ActionListener deletePitListener, int size) { + return new GroupedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(final Collection responses) { + deletePitListener.onResponse(new DeletePITResponse(true)); + } + + @Override + public void onFailure(final Exception e) { + logger.debug("Delete all PITs failed ", e); + deletePitListener.onResponse(new DeletePITResponse(false)); + } + }, size); + } +} diff --git a/server/src/main/java/org/opensearch/client/Client.java b/server/src/main/java/org/opensearch/client/Client.java index ec42e5a13c50a..5d9c958b3b4b2 100644 --- a/server/src/main/java/org/opensearch/client/Client.java +++ b/server/src/main/java/org/opensearch/client/Client.java @@ -55,19 +55,7 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.search.ClearScrollRequest; -import org.opensearch.action.search.ClearScrollRequestBuilder; -import org.opensearch.action.search.ClearScrollResponse; -import org.opensearch.action.search.CreatePITRequest; -import org.opensearch.action.search.CreatePITResponse; -import org.opensearch.action.search.MultiSearchRequest; -import org.opensearch.action.search.MultiSearchRequestBuilder; -import org.opensearch.action.search.MultiSearchResponse; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchRequestBuilder; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.action.search.SearchScrollRequest; -import org.opensearch.action.search.SearchScrollRequestBuilder; +import org.opensearch.action.search.*; import org.opensearch.action.termvectors.MultiTermVectorsRequest; import org.opensearch.action.termvectors.MultiTermVectorsRequestBuilder; import org.opensearch.action.termvectors.MultiTermVectorsResponse; @@ -330,6 +318,8 @@ public interface 
Client extends OpenSearchClient, Releasable { */ void createPit(CreatePITRequest createPITRequest, ActionListener listener); + void deletePit(DeletePITRequest deletePITRequest, ActionListener listener); + /** * Performs multiple search requests. */ diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 547518b7f75ee..837a608caaac4 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -327,24 +327,7 @@ import org.opensearch.action.ingest.SimulatePipelineRequest; import org.opensearch.action.ingest.SimulatePipelineRequestBuilder; import org.opensearch.action.ingest.SimulatePipelineResponse; -import org.opensearch.action.search.ClearScrollAction; -import org.opensearch.action.search.ClearScrollRequest; -import org.opensearch.action.search.ClearScrollRequestBuilder; -import org.opensearch.action.search.ClearScrollResponse; -import org.opensearch.action.search.CreatePITAction; -import org.opensearch.action.search.CreatePITRequest; -import org.opensearch.action.search.CreatePITResponse; -import org.opensearch.action.search.MultiSearchAction; -import org.opensearch.action.search.MultiSearchRequest; -import org.opensearch.action.search.MultiSearchRequestBuilder; -import org.opensearch.action.search.MultiSearchResponse; -import org.opensearch.action.search.SearchAction; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchRequestBuilder; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.action.search.SearchScrollAction; -import org.opensearch.action.search.SearchScrollRequest; -import org.opensearch.action.search.SearchScrollRequestBuilder; +import org.opensearch.action.search.*; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.action.termvectors.MultiTermVectorsAction; @@ -584,6 +567,11 @@ public void createPit(final CreatePITRequest createPITRequest, final ActionListe execute(CreatePITAction.INSTANCE, createPITRequest, listener); } + @Override + public void deletePit(final DeletePITRequest deletePITRequest, final ActionListener listener) { + execute(DeletePITAction.INSTANCE, deletePITRequest, listener); + } + @Override public ActionFuture multiSearch(MultiSearchRequest request) { return execute(MultiSearchAction.INSTANCE, request); diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java new file mode 100644 index 0000000000000..821282d433c0d --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java @@ -0,0 +1,55 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
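The new `deletePit` client hook above can also be exercised synchronously with a `PlainActionFuture`, the same pattern this series uses elsewhere in its unit tests; the fragment is illustrative only and assumes a wired `client` and an existing `pitId`:

    PlainActionFuture<DeletePITResponse> future = PlainActionFuture.newFuture();
    client.deletePit(new DeletePITRequest(pitId), future);
    DeletePITResponse response = future.actionGet();
    // response.isSucceeded() is true once all the referenced reader contexts are gone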
+ */ + +package org.opensearch.rest.action.search; + +import org.opensearch.action.search.DeletePITRequest; +import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.Strings; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestStatusToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static java.util.Arrays.asList; +import static java.util.Collections.unmodifiableList; +import static org.opensearch.rest.RestRequest.Method.*; + +public class RestDeletePITAction extends BaseRestHandler { + + @Override + public String getName() { + return "delete_pit_action"; + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + String pitIds = request.param("pit_id"); + DeletePITRequest deletePITRequest = new DeletePITRequest(); + deletePITRequest.setPitIds(asList(Strings.splitStringByCommaToArray(pitIds))); + request.withContentOrSourceParamParserOrNull((xContentParser -> { + if (xContentParser != null) { + // NOTE: if rest request with xcontent body has request parameters, values parsed from request body have the precedence + try { + deletePITRequest.fromXContent(xContentParser); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to parse request body", e); + } + } + })); + return channel -> client.deletePit(deletePITRequest, new RestStatusToXContentListener(channel)); + } + + @Override + public List routes() { + return unmodifiableList(asList(new Route(DELETE, "/_pit"), new Route(DELETE, "/_pit/{id}"))); + } +} diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 415ba9fe20a00..8ac12f8fbb1ef 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1024,6 +1024,22 @@ public boolean freeReaderContext(ShardSearchContextId contextId) { return false; } + /** + * Free reader context if found otherwise return false + */ + public boolean freeReaderContextIfFound(ShardSearchContextId contextId) { + try { + if (getReaderContext(contextId) != null) { + try (ReaderContext context = removeReaderContext(contextId.getId())) { + return context != null; + } + } + } catch (SearchContextMissingException e) { + return true; + } + return true; + } + /** * Update PIT reader with pit id, keep alive and created time etc */ @@ -1055,6 +1071,17 @@ public void freeAllScrollContexts() { } } + /** + * Free all active pit contexts + */ + public void freeAllPitContexts() { + for (ReaderContext readerContext : activeReaders.values()) { + if (readerContext instanceof PitReaderContext) { + freeReaderContextIfFound(readerContext.id()); + } + } + } + private long getKeepAlive(ShardSearchRequest request) { if (request.scroll() != null) { return getScrollKeepAlive(request.scroll()); diff --git a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java new file mode 100644 index 0000000000000..e617ba522bb06 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java @@ -0,0 +1,167 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source 
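The `freeReaderContextIfFound` helper above deliberately reports success when the reader context has already been removed, which is what lets a repeated delete of the same PIT id succeed; a small sketch of that contract (mirroring the unit test added later in this series, with `searchService` and `contextId` assumed):

    boolean freed = searchService.freeReaderContextIfFound(contextId);       // true, context removed
    boolean freedAgain = searchService.freeReaderContextIfFound(contextId);  // still true, already gone
    boolean freedStrict = searchService.freeReaderContext(contextId);        // false, nothing left to free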
license. + */ + +package org.opensearch.search; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.action.ActionFuture; +import org.opensearch.action.search.*; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.search.builder.PointInTimeBuilder; +import org.opensearch.test.InternalTestCluster; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) +public class DeletePitMultiNodeTests extends OpenSearchIntegTestCase { + + @Before + public void setupIndex() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 5).put("index.number_of_replicas", 1).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + ensureGreen(); + } + + @After + public void clearIndex() { + client().admin().indices().prepareDelete("index").get(); + } + + private CreatePITResponse createPitOnIndex(String index) throws ExecutionException, InterruptedException { + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { index }); + ActionFuture execute = client().execute(CreatePITAction.INSTANCE, request); + return execute.get(); + } + + public void testDeletePit() throws Exception { + CreatePITRequest request = new CreatePITRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePITAction.INSTANCE, request); + CreatePITResponse pitResponse = execute.get(); + List pitIds = new ArrayList<>(); + pitIds.add(pitResponse.getId()); + execute = client().execute(CreatePITAction.INSTANCE, request); + pitResponse = execute.get(); + pitIds.add(pitResponse.getId()); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); + ActionFuture deleteExecute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = deleteExecute.get(); + assertTrue(deletePITResponse.isSucceeded()); + } + + public void testDeletePitWhileNodeDrop() throws Exception { + CreatePITResponse pitResponse = createPitOnIndex("index"); + createIndex("index1", Settings.builder().put("index.number_of_shards", 5).put("index.number_of_replicas", 1).build()); + client().prepareIndex("index1").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + ensureGreen(); + List pitIds = new ArrayList<>(); + pitIds.add(pitResponse.getId()); + CreatePITResponse pitResponse1 = createPitOnIndex("index1"); + pitIds.add(pitResponse1.getId()); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); + internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = execute.get(); + assertEquals(false, deletePITResponse.isSucceeded()); + return super.onNodeStopped(nodeName); + } + }); + + ensureGreen(); + ActionFuture 
execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = execute.get(); + assertEquals(true, deletePITResponse.isSucceeded()); + client().admin().indices().prepareDelete("index1").get(); + } + + public void testDeleteAllPitsWhileNodeDrop() throws Exception { + createPitOnIndex("index"); + createIndex("index1", Settings.builder().put("index.number_of_shards", 5).put("index.number_of_replicas", 1).build()); + client().prepareIndex("index1").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + ensureGreen(); + DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = execute.get(); + assertEquals(false, deletePITResponse.isSucceeded()); + return super.onNodeStopped(nodeName); + } + }); + + ensureGreen(); + ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = execute.get(); + assertEquals(true, deletePITResponse.isSucceeded()); + client().admin().indices().prepareDelete("index1").get(); + } + + public void testDeleteWhileSearch() throws Exception { + CreatePITResponse pitResponse = createPitOnIndex("index"); + ensureGreen(); + List pitIds = new ArrayList<>(); + pitIds.add(pitResponse.getId()); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); + for(int i=0; i<4; i++) { + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .execute(); + } + int count = 0; + Thread[] threads = new Thread[5]; + CountDownLatch latch = new CountDownLatch(threads.length); + final AtomicBoolean deleted = new AtomicBoolean(false); + + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + latch.countDown(); + try { + latch.await(); + for (int j=0; j<30; j++) { + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .execute().get(); + } + } catch (Exception e) { + if(deleted.get() == true) { + if (!e.getMessage().contains("all shards failed")) + throw new AssertionError(e); + return; + } + throw new AssertionError(e); + } + }); + threads[i].setName("opensearch[node_s_0][search]"); + threads[i].start(); + } + ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = execute.get(); + deleted.set(true); + assertEquals(true, deletePITResponse.isSucceeded()); + + for (Thread thread : threads) { + thread.join(); + } + } + +} diff --git a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java index 88e3a5357ddec..29f570f60b000 100644 --- a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java @@ -21,7 +21,6 @@ import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; -import java.io.IOException; import java.util.concurrent.ExecutionException; import static org.hamcrest.Matchers.containsString; diff --git a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java 
b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java new file mode 100644 index 0000000000000..1798dcdf100df --- /dev/null +++ b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java @@ -0,0 +1,83 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pit; + +import org.apache.lucene.util.SetOnce; +import org.opensearch.action.ActionListener; +import org.opensearch.action.search.DeletePITRequest; +import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.search.RestDeletePITAction; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.client.NoOpNodeClient; +import org.opensearch.test.rest.FakeRestChannel; +import org.opensearch.test.rest.FakeRestRequest; + +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class RestDeletePitActionTests extends OpenSearchTestCase { + public void testParseDeletePitRequestWithInvalidJsonThrowsException() throws Exception { + RestDeletePITAction action = new RestDeletePITAction(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray("{invalid_json}"), + XContentType.JSON + ).build(); + Exception e = expectThrows(IllegalArgumentException.class, () -> action.prepareRequest(request, null)); + assertThat(e.getMessage(), equalTo("Failed to parse request body")); + } + + public void testBodyParamsOverrideQueryStringParams() throws Exception { + SetOnce pitCalled = new SetOnce<>(); + try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { + @Override + public void deletePit(DeletePITRequest request, ActionListener listener) { + pitCalled.set(true); + assertThat(request.getPitIds(), hasSize(1)); + assertThat(request.getPitIds().get(0), equalTo("BODY")); + } + }) { + RestDeletePITAction action = new RestDeletePITAction(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams( + Collections.singletonMap("pit_id", "QUERY_STRING") + ).withContent(new BytesArray("{\"pit_id\": [\"BODY\"]}"), XContentType.JSON).build(); + FakeRestChannel channel = new FakeRestChannel(request, false, 0); + action.handleRequest(request, channel, nodeClient); + + assertThat(pitCalled.get(), equalTo(true)); + } + } + + public void testDeletePitQueryStringParams() throws Exception { + SetOnce pitCalled = new SetOnce<>(); + try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { + @Override + public void deletePit(DeletePITRequest request, ActionListener listener) { + pitCalled.set(true); + assertThat(request.getPitIds(), hasSize(2)); + assertThat(request.getPitIds().get(0), equalTo("QUERY_STRING")); + assertThat(request.getPitIds().get(1), equalTo("QUERY_STRING_1")); + } + }) { + RestDeletePITAction action = new RestDeletePITAction(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams( + Collections.singletonMap("pit_id", "QUERY_STRING,QUERY_STRING_1") + ).build(); + FakeRestChannel channel = new FakeRestChannel(request, false, 0); + action.handleRequest(request, channel, nodeClient); + + 
assertThat(pitCalled.get(), equalTo(true)); + } + } +} From d67e3897165e01b226421a2587cee7064dd896f2 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Mon, 18 Apr 2022 10:52:48 +0530 Subject: [PATCH 04/19] Delete PIT API changes Signed-off-by: Bharathwaj G --- .../org/opensearch/action/ActionModule.java | 9 +- .../action/search/DeletePITController.java | 126 ------------------ .../action/search/DeletePITRequest.java | 3 + .../action/search/DeletePITResponse.java | 10 +- .../search/TransportDeletePITAction.java | 21 ++- .../java/org/opensearch/client/Client.java | 16 ++- .../client/support/AbstractClient.java | 22 ++- .../action/search/RestDeletePITAction.java | 4 +- .../org/opensearch/search/SearchService.java | 2 +- .../search/DeletePitMultiNodeTests.java | 56 +++++--- .../opensearch/search/SearchServiceTests.java | 27 ++++ 11 files changed, 141 insertions(+), 155 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/action/search/DeletePITController.java diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java index 67f5a3afb1a1b..350d91a560182 100644 --- a/server/src/main/java/org/opensearch/action/ActionModule.java +++ b/server/src/main/java/org/opensearch/action/ActionModule.java @@ -402,7 +402,14 @@ import org.opensearch.rest.action.ingest.RestGetPipelineAction; import org.opensearch.rest.action.ingest.RestPutPipelineAction; import org.opensearch.rest.action.ingest.RestSimulatePipelineAction; -import org.opensearch.rest.action.search.*; +import org.opensearch.rest.action.search.RestClearScrollAction; +import org.opensearch.rest.action.search.RestCountAction; +import org.opensearch.rest.action.search.RestCreatePITAction; +import org.opensearch.rest.action.search.RestDeletePITAction; +import org.opensearch.rest.action.search.RestExplainAction; +import org.opensearch.rest.action.search.RestMultiSearchAction; +import org.opensearch.rest.action.search.RestSearchAction; +import org.opensearch.rest.action.search.RestSearchScrollAction; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.usage.UsageService; diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITController.java b/server/src/main/java/org/opensearch/action/search/DeletePITController.java deleted file mode 100644 index 88d4a71f77259..0000000000000 --- a/server/src/main/java/org/opensearch/action/search/DeletePITController.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.action.search; - -import org.opensearch.action.ActionListener; -import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.util.concurrent.CountDown; -import org.opensearch.search.SearchPhaseResult; -import org.opensearch.search.internal.ShardSearchContextId; -import org.opensearch.transport.Transport; -import org.opensearch.transport.TransportResponse; - -import java.util.*; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; - -public class DeletePITController implements Runnable { - private final DiscoveryNodes nodes; - private final SearchTransportService searchTransportService; - private final CountDown expectedOps; - private final ActionListener listener; - private final AtomicBoolean hasFailed = new AtomicBoolean(false); - private final AtomicInteger freedSearchContexts = new AtomicInteger(0); - private final ClusterService clusterService; - private final Runnable runner; - - public DeletePITController( - DeletePITRequest request, - ActionListener listener, - ClusterService clusterService, - SearchTransportService searchTransportService - ) { - this.nodes = clusterService.state().getNodes(); - this.clusterService = clusterService; - this.searchTransportService = searchTransportService; - this.listener = listener; - List pitIds = request.getPitIds(); - final int expectedOps; - if (pitIds.size() == 1 && "_all".equals(pitIds.get(0))) { - expectedOps = nodes.getSize(); - runner = this::deleteAllPits; - } else { - // TODO: replace this with #closeContexts - List contexts = new ArrayList<>(); - for (String scrollId : request.getPitIds()) { - SearchContextIdForNode[] context = TransportSearchHelper.parseScrollId(scrollId).getContext(); - Collections.addAll(contexts, context); - } - if (contexts.isEmpty()) { - expectedOps = 0; - runner = () -> listener.onResponse(new DeletePITResponse(true)); - } else { - expectedOps = contexts.size(); - runner = () -> ClearScrollController.closeContexts( - clusterService.state().nodes(), - searchTransportService, - contexts, - ActionListener.wrap(r -> listener.onResponse(new DeletePITResponse(true)), listener::onFailure) - ); - } - } - this.expectedOps = new CountDown(expectedOps); - - } - - @Override - public void run() { - runner.run(); - } - - void deleteAllPits() { - for (final DiscoveryNode node : clusterService.state().getNodes()) { - try { - Transport.Connection connection = searchTransportService.getConnection(null, node); - searchTransportService.sendDeleteAllPitContexts(connection, new ActionListener() { - @Override - public void onResponse(TransportResponse response) { - onFreedContext(true); - } - - @Override - public void onFailure(Exception e) { - onFailedFreedContext(e, node); - } - }); - } catch (Exception e) { - onFailedFreedContext(e, node); - } - } - } - - public static class PITSinglePhaseSearchResult extends SearchPhaseResult { - public void setContextId(ShardSearchContextId contextId) { - this.contextId = contextId; - } - } - - private void onFreedContext(boolean freed) { - if (freed) { - freedSearchContexts.incrementAndGet(); - } - if (expectedOps.countDown()) { - boolean succeeded = hasFailed.get() == false; - listener.onResponse(new DeletePITResponse(succeeded)); - } - } - - private void onFailedFreedContext(Throwable e, DiscoveryNode node) { - /* - * We have to set the failure marker before we count down otherwise we can 
expose the failure marker before we have set it to a - * racing thread successfully freeing a context. This would lead to that thread responding that the clear scroll succeeded. - */ - hasFailed.set(true); - if (expectedOps.countDown()) { - listener.onResponse(new DeletePITResponse(false)); - } - } -} diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java b/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java index 2bd8e9681318b..04b3aeb0f6a07 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java @@ -29,6 +29,9 @@ */ public class DeletePITRequest extends ActionRequest implements ToXContentObject { + /** + * List of PIT IDs to be deleted , and use "_all" to delete all PIT reader contexts + */ private List pitIds; public DeletePITRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java b/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java index 388ca99b42402..220f5377bc1ce 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java @@ -12,7 +12,12 @@ import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.*; +import org.opensearch.common.xcontent.ConstructingObjectParser; +import org.opensearch.common.xcontent.ObjectParser; +import org.opensearch.common.xcontent.StatusToXContentObject; +import org.opensearch.common.xcontent.ToXContent; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; import org.opensearch.rest.RestStatus; import java.io.IOException; @@ -23,6 +28,9 @@ public class DeletePITResponse extends ActionResponse implements StatusToXContentObject { + /** + * This will be true if all PIT reader contexts are deleted. + */ private final boolean succeeded; public DeletePITResponse(boolean succeeded) { diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java b/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java index c28ff8797a2d0..918b5a791c9f0 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; @@ -33,6 +34,9 @@ import java.util.function.BiFunction; import java.util.stream.Collectors; +/** + * Transport action for deleting pit reader context - supports deleting list and all pit contexts + */ public class TransportDeletePITAction extends HandledTransportAction { private SearchService searchService; private final NamedWriteableRegistry namedWriteableRegistry; @@ -74,15 +78,21 @@ protected void doExecute(Task task, DeletePITRequest request, ActionListener new ParameterizedMessage("Delete PITs failed. 
" + "Cleared {} contexts out of {}", r, contexts.size()) + ); listener.onResponse(new DeletePITResponse(false)); } }, e -> { - logger.debug("Delete PIT failed ", e); + logger.debug("Delete PITs failed ", e); listener.onResponse(new DeletePITResponse(false)); })); } } + /** + * Delete all active PIT reader contexts + */ void deleteAllPits(ActionListener listener) { int size = clusterService.state().getNodes().getSize(); ActionListener groupedActionListener = getGroupedListener(listener, size); @@ -96,11 +106,17 @@ void deleteAllPits(ActionListener listener) { } } + /** + * Delete list of pits, return success if all reader contexts are deleted ( or not found ). + */ void deletePits(List contexts, ActionListener listener) { final StepListener> lookupListener = getLookupListener(contexts); lookupListener.whenComplete(nodeLookup -> { final GroupedActionListener groupedListener = new GroupedActionListener<>( - ActionListener.delegateFailure(listener, (l, rs) -> l.onResponse(Math.toIntExact(rs.stream().filter(r -> r).count()))), + ActionListener.delegateFailure( + listener, + (l, result) -> l.onResponse(Math.toIntExact(result.stream().filter(r -> r).count())) + ), contexts.size() ); @@ -117,6 +133,7 @@ void deletePits(List contexts, ActionListener l ActionListener.wrap(r -> groupedListener.onResponse(r.isFreed()), e -> groupedListener.onResponse(false)) ); } catch (Exception e) { + logger.debug("Delete PIT failed ", e); groupedListener.onResponse(false); } } diff --git a/server/src/main/java/org/opensearch/client/Client.java b/server/src/main/java/org/opensearch/client/Client.java index 5d9c958b3b4b2..472503379fec4 100644 --- a/server/src/main/java/org/opensearch/client/Client.java +++ b/server/src/main/java/org/opensearch/client/Client.java @@ -55,7 +55,21 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.search.*; +import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.ClearScrollRequestBuilder; +import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePITRequest; +import org.opensearch.action.search.CreatePITResponse; +import org.opensearch.action.search.DeletePITRequest; +import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchRequestBuilder; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchRequestBuilder; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.action.search.SearchScrollRequestBuilder; import org.opensearch.action.termvectors.MultiTermVectorsRequest; import org.opensearch.action.termvectors.MultiTermVectorsRequestBuilder; import org.opensearch.action.termvectors.MultiTermVectorsResponse; diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 837a608caaac4..21a5f9ce89c56 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -327,7 +327,27 @@ import org.opensearch.action.ingest.SimulatePipelineRequest; import org.opensearch.action.ingest.SimulatePipelineRequestBuilder; 
import org.opensearch.action.ingest.SimulatePipelineResponse; -import org.opensearch.action.search.*; +import org.opensearch.action.search.ClearScrollAction; +import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.ClearScrollRequestBuilder; +import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePITAction; +import org.opensearch.action.search.CreatePITRequest; +import org.opensearch.action.search.CreatePITResponse; +import org.opensearch.action.search.DeletePITAction; +import org.opensearch.action.search.DeletePITRequest; +import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.action.search.MultiSearchAction; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchRequestBuilder; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchAction; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchRequestBuilder; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchScrollAction; +import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.action.search.SearchScrollRequestBuilder; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.action.termvectors.MultiTermVectorsAction; diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java index 821282d433c0d..26739d3749f92 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java @@ -21,7 +21,7 @@ import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; -import static org.opensearch.rest.RestRequest.Method.*; +import static org.opensearch.rest.RestRequest.Method.DELETE; public class RestDeletePITAction extends BaseRestHandler { @@ -50,6 +50,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client @Override public List routes() { - return unmodifiableList(asList(new Route(DELETE, "/_pit"), new Route(DELETE, "/_pit/{id}"))); + return unmodifiableList(asList(new Route(DELETE, "/_search/_point_in_time"), new Route(DELETE, "/_search/_point_in_time/{id}"))); } } diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 0ae58c2314cf5..180627c9d0ee1 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1025,7 +1025,7 @@ public boolean freeReaderContext(ShardSearchContextId contextId) { } /** - * Free reader context if found otherwise return false + * Free reader context if found , return false if delete reader fails */ public boolean freeReaderContextIfFound(ShardSearchContextId contextId) { try { diff --git a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java index e617ba522bb06..a117e891c6e6d 100644 --- a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java @@ -11,7 +11,12 @@ import org.junit.After; import 
org.junit.Before; import org.opensearch.action.ActionFuture; -import org.opensearch.action.search.*; +import org.opensearch.action.search.CreatePITAction; +import org.opensearch.action.search.CreatePITRequest; +import org.opensearch.action.search.CreatePITResponse; +import org.opensearch.action.search.DeletePITAction; +import org.opensearch.action.search.DeletePITRequest; +import org.opensearch.action.search.DeletePITResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.search.builder.PointInTimeBuilder; @@ -62,6 +67,13 @@ public void testDeletePit() throws Exception { ActionFuture deleteExecute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); DeletePITResponse deletePITResponse = deleteExecute.get(); assertTrue(deletePITResponse.isSucceeded()); + /** + * Checking deleting the same PIT id again results in succeeded + */ + deleteExecute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + deletePITResponse = deleteExecute.get(); + assertTrue(deletePITResponse.isSucceeded()); + } public void testDeletePitWhileNodeDrop() throws Exception { @@ -79,15 +91,19 @@ public void testDeletePitWhileNodeDrop() throws Exception { public Settings onNodeStopped(String nodeName) throws Exception { ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); DeletePITResponse deletePITResponse = execute.get(); - assertEquals(false, deletePITResponse.isSucceeded()); + assertFalse(deletePITResponse.isSucceeded()); return super.onNodeStopped(nodeName); } }); ensureGreen(); + /** + * When we invoke delete again, returns success after clearing the remaining readers. Asserting reader context + * not found exceptions don't result in failures ( as deletion in one node is successful ) + */ ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); DeletePITResponse deletePITResponse = execute.get(); - assertEquals(true, deletePITResponse.isSucceeded()); + assertTrue(deletePITResponse.isSucceeded()); client().admin().indices().prepareDelete("index1").get(); } @@ -102,15 +118,19 @@ public void testDeleteAllPitsWhileNodeDrop() throws Exception { public Settings onNodeStopped(String nodeName) throws Exception { ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); DeletePITResponse deletePITResponse = execute.get(); - assertEquals(false, deletePITResponse.isSucceeded()); + assertFalse(deletePITResponse.isSucceeded()); return super.onNodeStopped(nodeName); } }); ensureGreen(); + /** + * When we invoke delete again, returns success after clearing the remaining readers. 
Asserting reader context + * not found exceptions don't result in failures ( as deletion in one node is successful ) + */ ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); DeletePITResponse deletePITResponse = execute.get(); - assertEquals(true, deletePITResponse.isSucceeded()); + assertTrue(deletePITResponse.isSucceeded()); client().admin().indices().prepareDelete("index1").get(); } @@ -120,13 +140,6 @@ public void testDeleteWhileSearch() throws Exception { List pitIds = new ArrayList<>(); pitIds.add(pitResponse.getId()); DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); - for(int i=0; i<4; i++) { - client().prepareSearch() - .setSize(2) - .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) - .execute(); - } - int count = 0; Thread[] threads = new Thread[5]; CountDownLatch latch = new CountDownLatch(threads.length); final AtomicBoolean deleted = new AtomicBoolean(false); @@ -136,16 +149,19 @@ public void testDeleteWhileSearch() throws Exception { latch.countDown(); try { latch.await(); - for (int j=0; j<30; j++) { + for (int j = 0; j < 30; j++) { client().prepareSearch() - .setSize(2) - .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) - .execute().get(); + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .execute() + .get(); } } catch (Exception e) { - if(deleted.get() == true) { - if (!e.getMessage().contains("all shards failed")) - throw new AssertionError(e); + /** + * assert for exception once delete pit goes through. throw error in case of any exeption before that. + */ + if (deleted.get() == true) { + if (!e.getMessage().contains("all shards failed")) throw new AssertionError(e); return; } throw new AssertionError(e); @@ -157,7 +173,7 @@ public void testDeleteWhileSearch() throws Exception { ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); DeletePITResponse deletePITResponse = execute.get(); deleted.set(true); - assertEquals(true, deletePITResponse.isSucceeded()); + assertTrue(deletePITResponse.isSucceeded()); for (Thread thread : threads) { thread.join(); diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 18a0d55c487c5..7c6ec33e7c1f0 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -1414,6 +1414,33 @@ public void testOpenReaderContext() { assertTrue(searchService.freeReaderContext(future.actionGet())); } + public void testDeletePitReaderContext() { + createIndex("index"); + SearchService searchService = getInstanceFromNode(SearchService.class); + PlainActionFuture future = new PlainActionFuture<>(); + searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future); + future.actionGet(); + assertThat(searchService.getActiveContexts(), equalTo(1)); + assertTrue(searchService.freeReaderContextIfFound(future.actionGet())); + // assert true for reader context not found + assertTrue(searchService.freeReaderContextIfFound(future.actionGet())); + // adding this assert to showcase behavior difference + assertFalse(searchService.freeReaderContext(future.actionGet())); + } + + public void testDeleteAllPitReaderContexts() { + createIndex("index"); + SearchService 
searchService = getInstanceFromNode(SearchService.class); + PlainActionFuture future = new PlainActionFuture<>(); + searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future); + future.actionGet(); + searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future); + future.actionGet(); + assertThat(searchService.getActiveContexts(), equalTo(2)); + searchService.freeAllPitContexts(); + assertThat(searchService.getActiveContexts(), equalTo(0)); + } + public void testPitContextMaxKeepAlive() { createIndex("index"); SearchService searchService = getInstanceFromNode(SearchService.class); From 65c123a1128c94e74c0da0cb36cdff0fca528598 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Fri, 29 Apr 2022 17:31:28 +0530 Subject: [PATCH 05/19] Addressing review comments Signed-off-by: Bharathwaj G --- .../search/slice/SearchSliceIT.java | 62 +++++++++++++++++++ .../action/search/CreatePITRequest.java | 1 + .../action/search/CreatePITResponse.java | 3 + .../action/search/PITController.java | 26 +++++--- .../common/settings/ClusterSettings.java | 1 - .../org/opensearch/search/SearchService.java | 4 +- .../main/java/org/opensearch/tasks/Task.java | 4 -- .../opensearch/search/PitMultiNodeTests.java | 2 + 8 files changed, 86 insertions(+), 17 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index 9c735c42052e3..3bc932201c1f3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -32,9 +32,13 @@ package org.opensearch.search.slice; +import org.opensearch.action.ActionFuture; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.action.index.IndexRequestBuilder; +import org.opensearch.action.search.CreatePITAction; +import org.opensearch.action.search.CreatePITRequest; +import org.opensearch.action.search.CreatePITResponse; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; @@ -46,6 +50,7 @@ import org.opensearch.search.Scroll; import org.opensearch.search.SearchException; import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -129,6 +134,63 @@ public void testSearchSort() throws Exception { } } + public void testSearchSortWithPIT() throws Exception { + int numShards = randomIntBetween(1, 7); + int numDocs = randomIntBetween(100, 1000); + setupIndex(numDocs, numShards); + int max = randomIntBetween(2, numShards * 3); + CreatePITRequest pitRequest = new CreatePITRequest(TimeValue.timeValueDays(1), true); + pitRequest.setIndices(new String[] { "test" }); + ActionFuture execute = client().execute(CreatePITAction.INSTANCE, pitRequest); + CreatePITResponse pitResponse = execute.get(); + for (String field : new String[] { "_id", "random_int", "static_int" }) { + int fetchSize = randomIntBetween(10, 100); + + // test _doc sort + SearchRequestBuilder request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(fetchSize) + 
.addSort(SortBuilders.fieldSort("_doc")); + assertSearchSlicesWithPIT(request, field, max, numDocs); + + // test numeric sort + request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(fetchSize) + .addSort(SortBuilders.fieldSort("random_int")); + assertSearchSlicesWithPIT(request, field, max, numDocs); + } + } + + private void assertSearchSlicesWithPIT(SearchRequestBuilder request, String field, int numSlice, int numDocs) { + int totalResults = 0; + List keys = new ArrayList<>(); + for (int id = 0; id < numSlice; id++) { + SliceBuilder sliceBuilder = new SliceBuilder(field, id, numSlice); + SearchResponse searchResponse = request.slice(sliceBuilder).setFrom(0).get(); + totalResults += searchResponse.getHits().getHits().length; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int numSliceResults = searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + while (searchResponse.getHits().getHits().length > 0) { + searchResponse = request.setFrom(numSliceResults).slice(sliceBuilder).get(); + totalResults += searchResponse.getHits().getHits().length; + numSliceResults += searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + } + assertThat(totalResults, equalTo(numDocs)); + assertThat(keys.size(), equalTo(numDocs)); + assertThat(new HashSet(keys).size(), equalTo(numDocs)); + } + public void testWithPreferenceAndRoutings() throws Exception { int numShards = 10; int totalDocs = randomIntBetween(100, 1000); diff --git a/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java b/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java index 51d263d7ea856..0b7efa73c7e57 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePITRequest.java @@ -152,6 +152,7 @@ public final String buildDescription() { Strings.arrayToDelimitedString(indices, ",", sb); sb.append("], "); sb.append("pointintime[").append(keepAlive).append("], "); + sb.append("allowPartialPitCreation[").append(allowPartialPitCreation).append("], "); return sb.toString(); } diff --git a/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java b/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java index 3812cb42b22b3..0b22f2bc95c17 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePITResponse.java @@ -31,6 +31,9 @@ public class CreatePITResponse extends ActionResponse implements StatusToXConten private final ShardSearchFailure[] shardFailures; public CreatePITResponse(SearchResponse searchResponse) { + if (searchResponse.pointInTimeId() == null || searchResponse.pointInTimeId().isEmpty()) { + throw new IllegalArgumentException("Point in time ID is empty"); + } this.id = searchResponse.pointInTimeId(); this.totalShards = searchResponse.getTotalShards(); this.successfulShards = searchResponse.getSuccessfulShards(); diff --git a/server/src/main/java/org/opensearch/action/search/PITController.java b/server/src/main/java/org/opensearch/action/search/PITController.java index c61484bb6b348..2e8e517960746 100644 --- 
a/server/src/main/java/org/opensearch/action/search/PITController.java +++ b/server/src/main/java/org/opensearch/action/search/PITController.java @@ -29,6 +29,7 @@ import org.opensearch.transport.Transport; import java.util.Collection; +import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.function.BiFunction; @@ -37,10 +38,7 @@ import static org.opensearch.common.unit.TimeValue.timeValueSeconds; /** - * Controller for creating PIT reader context - * Phase 1 of create PIT request : Create PIT reader contexts in the associated shards with a temporary keep alive - * Phase 2 of create PIT : Update PIT reader context with PIT ID and keep alive from request and - * fail user request if any of the updates in this phase are failed - we clean up PITs in case of such failures + * Controller to handle PIT related logic */ public class PITController implements Runnable { private final Runnable runner; @@ -81,6 +79,12 @@ private TimeValue getCreatePitTemporaryKeepAlive() { return CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.get(clusterService.getSettings()); } + /** + * Method for creating PIT reader context + * Phase 1 of create PIT request : Create PIT reader contexts in the associated shards with a temporary keep alive + * Phase 2 of create PIT : Update PIT reader context with PIT ID and keep alive from request and + * fail user request if any of the updates in this phase are failed - we clean up PITs in case of such failures + */ public void executeCreatePit() { SearchRequest searchRequest = new SearchRequest(request.getIndices()); searchRequest.preference(request.getPreference()); @@ -94,7 +98,7 @@ public void executeCreatePit() { task.getAction(), () -> task.getDescription(), task.getParentTaskId(), - task.getHeaders() + new HashMap<>() ); final StepListener createPitListener = new StepListener<>(); @@ -118,6 +122,7 @@ public void executeCreatePit() { * Creates PIT reader context with temporary keep alive */ void executeCreatePit(Task task, SearchRequest searchRequest, StepListener createPitListener) { + logger.debug("Creating PIT context"); transportSearchAction.executeRequest( task, searchRequest, @@ -152,6 +157,7 @@ void executeUpdatePitId( ActionListener updatePitIdListener ) { createPitListener.whenComplete(searchResponse -> { + logger.debug("Updating PIT context with PIT ID, creation time and keep alive"); CreatePITResponse createPITResponse = new CreatePITResponse(searchResponse); SearchContextId contextId = SearchContextId.decode(namedWriteableRegistry, createPITResponse.getId()); final StepListener> lookupListener = getConnectionLookupListener(contextId); @@ -166,7 +172,7 @@ void executeUpdatePitId( * store the create time ( same create time for all PIT contexts across shards ) to be used * for list PIT api */ - long createTime = System.currentTimeMillis(); + final long createTime = System.currentTimeMillis(); for (Map.Entry entry : contextId.shards().entrySet()) { DiscoveryNode node = nodelookup.apply(entry.getValue().getClusterAlias(), entry.getValue().getNode()); try { @@ -205,10 +211,10 @@ private StepListener> getConnectionLoo final StepListener> lookupListener = new StepListener<>(); - if (clusters.isEmpty() == false) { - searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); - } else { + if (clusters.isEmpty()) { lookupListener.onResponse((cluster, nodeId) -> state.getNodes().get(nodeId)); + } else { + searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); } return lookupListener; } @@ 
-246,7 +252,7 @@ public void onResponse(Integer freed) { @Override public void onFailure(Exception e) { - logger.debug("Cleaning up PIT contexts failed ", e); + logger.error("Cleaning up PIT contexts failed ", e); } }; ClearScrollController.closeContexts(clusterService.state().getNodes(), searchTransportService, contexts, deleteListener); diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index cfb22b0518f7e..e89c933dc708b 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -32,7 +32,6 @@ package org.opensearch.common.settings; import org.apache.logging.log4j.LogManager; -import org.opensearch.action.main.TransportMainAction; import org.opensearch.action.search.PITController; import org.opensearch.cluster.routing.allocation.decider.NodeLoadAwareAllocationDecider; import org.opensearch.index.IndexModule; diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 823bcbc6376ab..6dca959bcac70 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1297,8 +1297,8 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc } if (source.slice() != null) { - if (context.scrollContext() == null) { - throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context"); + if (context.scrollContext() == null && !(context.readerContext() instanceof PitReaderContext)) { + throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context or PIT context"); } context.sliceBuilder(source.slice()); } diff --git a/server/src/main/java/org/opensearch/tasks/Task.java b/server/src/main/java/org/opensearch/tasks/Task.java index d2e91d6ee3418..a51af17ae8ea2 100644 --- a/server/src/main/java/org/opensearch/tasks/Task.java +++ b/server/src/main/java/org/opensearch/tasks/Task.java @@ -364,10 +364,6 @@ public String getHeader(String header) { return headers.get(header); } - public Map getHeaders() { - return headers; - } - public TaskResult result(DiscoveryNode node, Exception error) throws IOException { return new TaskResult(taskInfo(node.getId(), true, true), error); } diff --git a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java index 32a2e7b21eb6f..3efdcadb8e09f 100644 --- a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java @@ -104,6 +104,8 @@ public Settings onNodeStopped(String nodeName) throws Exception { .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) .get(); assertEquals(1, searchResponse.getSuccessfulShards()); + assertEquals(1, searchResponse.getFailedShards()); + assertEquals(0, searchResponse.getSkippedShards()); assertEquals(2, searchResponse.getTotalShards()); return super.onNodeStopped(nodeName); } From 77be351b3c345fd49972767c09ac6fa462cbee64 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Mon, 2 May 2022 19:05:37 +0530 Subject: [PATCH 06/19] addressing review comments Signed-off-by: Bharathwaj G --- .../org/opensearch/search/SearchService.java | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git 
a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 8a0a08ad5d703..77fa1fe50a83c 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1018,6 +1018,34 @@ public boolean freeReaderContext(ShardSearchContextId contextId) { return false; } + /** + * Free reader context if found otherwise return false + */ + public boolean freeReaderContextIfFound(ShardSearchContextId contextId) { + try { + if (getReaderContext(contextId) != null) { + try (ReaderContext context = removeReaderContext(contextId.getId())) { + return context != null; + } + } + } catch (SearchContextMissingException e) { + return true; + } + return true; + } + + + /** + * Free all active pit contexts + */ + public void freeAllPitContexts() { + for (ReaderContext readerContext : activeReaders.values()) { + if (readerContext instanceof PitReaderContext) { + freeReaderContextIfFound(readerContext.id()); + } + } + } + /** * Update PIT reader with pit id, keep alive and created time etc */ From 51ce82f556ba1f73a59b08cf5570364662e4ddbc Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Tue, 3 May 2022 13:38:08 +0530 Subject: [PATCH 07/19] addressing review comments Signed-off-by: Bharathwaj G --- .../action/search/SearchTransportService.java | 98 +-- .../search/TransportDeletePITAction.java | 50 +- .../org/opensearch/search/SearchService.java | 12 +- .../search/CreatePitControllerTests.java | 14 +- .../search/TransportDeletePITActionTests.java | 584 ++++++++++++++++++ .../search/DeletePitMultiNodeTests.java | 18 + 6 files changed, 706 insertions(+), 70 deletions(-) create mode 100644 server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java index b8abb6f96e574..b664d19597c9d 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java @@ -83,10 +83,10 @@ public class SearchTransportService { public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]"; - public static final String FREE_CONTEXT_PIT_ACTION_NAME = "indices:data/read/search[free_context/pit]"; + public static final String FREE_PIT_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context/pit]"; public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]"; public static final String CLEAR_SCROLL_CONTEXTS_ACTION_NAME = "indices:data/read/search[clear_scroll_contexts]"; - public static final String DELETE_ALL_PIT_CONTEXTS_ACTION_NAME = "indices:data/read/search[delete_pit_contexts]"; + public static final String FREE_ALL_PIT_CONTEXTS_ACTION_NAME = "indices:data/read/search[delete_pit_contexts]"; public static final String DFS_ACTION_NAME = "indices:data/read/search[phase/dfs]"; public static final String QUERY_ACTION_NAME = "indices:data/read/search[phase/query]"; public static final String QUERY_ID_ACTION_NAME = "indices:data/read/search[phase/query/id]"; @@ -144,20 +144,6 @@ public void sendFreeContext( ); } - public void sendPitFreeContext( - Transport.Connection connection, - ShardSearchContextId contextId, - ActionListener listener - ) { - transportService.sendRequest( - connection, - 
FREE_CONTEXT_PIT_ACTION_NAME, - new ScrollFreeContextRequest(contextId), - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new) - ); - } - public void updatePitContext( Transport.Connection connection, UpdatePITContextRequest request, @@ -214,13 +200,27 @@ public void sendClearAllScrollContexts(Transport.Connection connection, final Ac ); } - public void sendDeleteAllPitContexts(Transport.Connection connection, final ActionListener listener) { + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + transportService.sendRequest( + connection, + FREE_PIT_CONTEXT_ACTION_NAME, + new PITFreeContextRequest(contextId), + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new) + ); + } + + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { transportService.sendRequest( connection, - DELETE_ALL_PIT_CONTEXTS_ACTION_NAME, + FREE_ALL_PIT_CONTEXTS_ACTION_NAME, TransportRequest.Empty.INSTANCE, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(listener, (in) -> TransportResponse.Empty.INSTANCE) + new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new) ); } @@ -389,6 +389,30 @@ public ShardSearchContextId id() { } + static class PITFreeContextRequest extends TransportRequest { + private ShardSearchContextId contextId; + + PITFreeContextRequest(ShardSearchContextId contextId) { + this.contextId = Objects.requireNonNull(contextId); + } + + PITFreeContextRequest(StreamInput in) throws IOException { + super(in); + contextId = new ShardSearchContextId(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + contextId.writeTo(out); + } + + public ShardSearchContextId id() { + return this.contextId; + } + + } + static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest { private OriginalIndices originalIndices; @@ -465,15 +489,30 @@ public static void registerRequestHandler(TransportService transportService, Sea TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, SearchFreeContextResponse::new); transportService.registerRequestHandler( - FREE_CONTEXT_PIT_ACTION_NAME, + FREE_PIT_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, - ScrollFreeContextRequest::new, + PITFreeContextRequest::new, (request, channel, task) -> { boolean freed = searchService.freeReaderContextIfFound(request.id()); channel.sendResponse(new SearchFreeContextResponse(freed)); } ); - TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_PIT_ACTION_NAME, SearchFreeContextResponse::new); + TransportActionProxy.registerProxyAction(transportService, FREE_PIT_CONTEXT_ACTION_NAME, SearchFreeContextResponse::new); + + transportService.registerRequestHandler( + FREE_ALL_PIT_CONTEXTS_ACTION_NAME, + ThreadPool.Names.SAME, + TransportRequest.Empty::new, + (request, channel, task) -> { + boolean freed = searchService.freeAllPitContexts(); + channel.sendResponse(new SearchFreeContextResponse(freed)); + } + ); + TransportActionProxy.registerProxyAction( + transportService, + FREE_ALL_PIT_CONTEXTS_ACTION_NAME, + (in) -> TransportResponse.Empty.INSTANCE + ); transportService.registerRequestHandler( FREE_CONTEXT_ACTION_NAME, @@ -658,21 +697,6 @@ public static void registerRequestHandler(TransportService transportService, Sea ); 
TransportActionProxy.registerProxyAction(transportService, UPDATE_READER_CONTEXT_ACTION_NAME, UpdatePitContextResponse::new); - transportService.registerRequestHandler( - DELETE_ALL_PIT_CONTEXTS_ACTION_NAME, - ThreadPool.Names.SAME, - TransportRequest.Empty::new, - (request, channel, task) -> { - searchService.freeAllPitContexts(); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - ); - TransportActionProxy.registerProxyAction( - transportService, - DELETE_ALL_PIT_CONTEXTS_ACTION_NAME, - (in) -> TransportResponse.Empty.INSTANCE - ); - } /** diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java b/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java index 918b5a791c9f0..5785148913337 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.util.SetOnce; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; @@ -22,7 +23,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.search.SearchService; import org.opensearch.tasks.Task; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; @@ -38,7 +38,6 @@ * Transport action for deleting pit reader context - supports deleting list and all pit contexts */ public class TransportDeletePITAction extends HandledTransportAction { - private SearchService searchService; private final NamedWriteableRegistry namedWriteableRegistry; private TransportSearchAction transportSearchAction; private final ClusterService clusterService; @@ -47,7 +46,6 @@ public class TransportDeletePITAction extends HandledTransportAction listener) { int size = clusterService.state().getNodes().getSize(); - ActionListener groupedActionListener = getGroupedListener(listener, size); + ActionListener groupedActionListener = new GroupedActionListener( + new ActionListener<>() { + @Override + public void onResponse(final Collection responses) { + final SetOnce succeeded = new SetOnce<>(); + for (SearchTransportService.SearchFreeContextResponse response : responses) { + if (!response.isFreed()) { + succeeded.set(false); + break; + } + } + succeeded.trySet(true); + listener.onResponse(new DeletePITResponse(succeeded.get())); + } + + @Override + public void onFailure(final Exception e) { + logger.debug("Delete all PITs failed ", e); + listener.onResponse(new DeletePITResponse(false)); + } + }, + size + ); for (final DiscoveryNode node : clusterService.state().getNodes()) { try { Transport.Connection connection = searchTransportService.getConnection(null, node); - searchTransportService.sendDeleteAllPitContexts(connection, groupedActionListener); + searchTransportService.sendFreeAllPitContexts(connection, groupedActionListener); } catch (Exception e) { groupedActionListener.onFailure(e); } @@ -123,11 +142,11 @@ void deletePits(List contexts, ActionListener l for (SearchContextIdForNode contextId : contexts) { final DiscoveryNode node = nodeLookup.apply(contextId.getClusterAlias(), contextId.getNode()); if (node == null) { - groupedListener.onFailure(new OpenSearchException("node 
not connected")); + groupedListener.onFailure(new OpenSearchException("node not found")); } else { try { final Transport.Connection connection = searchTransportService.getConnection(contextId.getClusterAlias(), node); - searchTransportService.sendPitFreeContext( + searchTransportService.sendFreePITContext( connection, contextId.getSearchContextId(), ActionListener.wrap(r -> groupedListener.onResponse(r.isFreed()), e -> groupedListener.onResponse(false)) @@ -154,19 +173,4 @@ private StepListener> getLookupListene } return lookupListener; } - - private ActionListener getGroupedListener(ActionListener deletePitListener, int size) { - return new GroupedActionListener<>(new ActionListener<>() { - @Override - public void onResponse(final Collection responses) { - deletePitListener.onResponse(new DeletePITResponse(true)); - } - - @Override - public void onFailure(final Exception e) { - logger.debug("Delete all PITs failed ", e); - deletePitListener.onResponse(new DeletePITResponse(false)); - } - }, size); - } } diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 77fa1fe50a83c..c196f68e28baa 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -36,6 +36,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.util.SetOnce; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; @@ -1034,16 +1035,21 @@ public boolean freeReaderContextIfFound(ShardSearchContextId contextId) { return true; } - /** * Free all active pit contexts */ - public void freeAllPitContexts() { + public boolean freeAllPitContexts() { + final SetOnce isFreed = new SetOnce<>(); for (ReaderContext readerContext : activeReaders.values()) { if (readerContext instanceof PitReaderContext) { - freeReaderContextIfFound(readerContext.id()); + final boolean succeeded = freeReaderContextIfFound(readerContext.id()); + if (!succeeded) { + isFreed.trySet(false); + } } } + isFreed.trySet(true); + return isFreed.get(); } /** diff --git a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java index 9742a58346e73..9f9b22a3b9d7d 100644 --- a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java +++ b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java @@ -69,7 +69,7 @@ public void setupData() { node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); - setPitId(); + pitId = getPitId(); namedWriteableRegistry = new NamedWriteableRegistry( Arrays.asList( new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new), @@ -431,7 +431,7 @@ public void onFailure(Exception e) { } - QueryBuilder randomQueryBuilder() { + public static QueryBuilder randomQueryBuilder() { if (randomBoolean()) { return new TermQueryBuilder(randomAlphaOfLength(10), randomAlphaOfLength(10)); } else if (randomBoolean()) { @@ -441,21 +441,21 @@ QueryBuilder randomQueryBuilder() { } } - private void setPitId() { + public static String 
getPitId() { AtomicArray array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult( new ShardSearchContextId("a", 1), - node1 + null ); testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult( new ShardSearchContextId("b", 12), - node2 + null ); testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult( new ShardSearchContextId("c", 42), - node3 + null ); testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); array.setOnce(0, testSearchPhaseResult1); @@ -477,7 +477,7 @@ private void setPitId() { aliasFilters.put(result.getSearchShardTarget().getShardId().getIndex().getUUID(), aliasFilter); } } - pitId = SearchContextId.encode(array.asList(), aliasFilters, version); + return SearchContextId.encode(array.asList(), aliasFilters, version); } } diff --git a/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java new file mode 100644 index 0000000000000..347c5a11630de --- /dev/null +++ b/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java @@ -0,0 +1,584 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.opensearch.Version; +import org.opensearch.action.ActionListener; +import org.opensearch.action.support.ActionFilter; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.PlainActionFuture; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.UUIDs; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.settings.Settings; +import org.opensearch.index.query.IdsQueryBuilder; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskId; +import org.opensearch.tasks.TaskManager; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.Transport; +import org.opensearch.transport.TransportResponse; +import org.opensearch.transport.TransportService; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ExecutionException; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.opensearch.action.support.PlainActionFuture.newFuture; + +public class TransportDeletePITActionTests extends OpenSearchTestCase { + + DiscoveryNode node1 = null; + DiscoveryNode node2 = null; + DiscoveryNode node3 = null; + String pitId = null; + TransportSearchAction transportSearchAction = null; + Task task = null; + DiscoveryNodes nodes = null; + NamedWriteableRegistry namedWriteableRegistry = null; + ClusterService clusterServiceMock = null; + + @Before + public void setupData() { + node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + pitId = CreatePitControllerTests.getPitId(); + namedWriteableRegistry = new NamedWriteableRegistry( + Arrays.asList( + new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, MatchAllQueryBuilder.NAME, MatchAllQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, IdsQueryBuilder.NAME, IdsQueryBuilder::new) + ) + ); + nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); + transportSearchAction = mock(TransportSearchAction.class); + task = new Task( + randomLong(), + "transport", + SearchAction.NAME, + "description", + new TaskId(randomLong() + ":" + randomLong()), + Collections.emptyMap() + ); + InternalSearchResponse response = new InternalSearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), + 
InternalAggregations.EMPTY, + null, + null, + false, + null, + 1 + ); + + clusterServiceMock = mock(ClusterService.class); + ClusterState state = mock(ClusterState.class); + + final Settings keepAliveSettings = Settings.builder() + .put(CreatePITController.CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.getKey(), 30000) + .build(); + when(clusterServiceMock.getSettings()).thenReturn(keepAliveSettings); + + when(state.getMetadata()).thenReturn(Metadata.EMPTY_METADATA); + when(state.metadata()).thenReturn(Metadata.EMPTY_METADATA); + when(clusterServiceMock.state()).thenReturn(state); + when(state.getNodes()).thenReturn(nodes); + } + + /** + * Test if transport call for update pit is made to all nodes present as part of PIT ID returned from phase one of create pit + */ + public void testDeletePitSuccess() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(true, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeleteAllPITSuccess() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + 
+ @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(true, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeletePitWhenNodeIsDown() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeletePitWhenAllNodesAreDown() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", 
TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeletePitFailure() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(false))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + 
); + DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeleteAllPitWhenNodeIsDown() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + @Override + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeleteAllPitWhenAllNodesAreDown() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + + @Override + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onFailure(new Exception("node down"))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportService transportService = new TransportService( + Settings.EMPTY, + 
mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + + public void testDeleteAllPitFailure() throws InterruptedException, ExecutionException { + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + ThreadPool threadPool = new ThreadPool(settings); + try { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { + + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(false))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), + null, + Collections.emptySet() + ) { + @Override + public TaskManager getTaskManager() { + return taskManager; + } + }; + TransportDeletePITAction action = new TransportDeletePITAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePITResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } finally { + assertTrue(OpenSearchTestCase.terminate(threadPool)); + } + } + +} diff --git a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java index a117e891c6e6d..9bfb3a76220bc 100644 --- a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java @@ -76,6 +76,24 @@ public void testDeletePit() throws Exception { } + public void testDeleteAllPits() throws Exception { + createPitOnIndex("index"); + createIndex("index1", 
Settings.builder().put("index.number_of_shards", 5).put("index.number_of_replicas", 1).build()); + client().prepareIndex("index1").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + ensureGreen(); + createPitOnIndex("index1"); + DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + + /** + * Delete all the active PITs across all indices using the _all qualifier and verify + * that the delete request reports success + */ + ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + DeletePITResponse deletePITResponse = execute.get(); + assertTrue(deletePITResponse.isSucceeded()); + client().admin().indices().prepareDelete("index1").get(); + } + public void testDeletePitWhileNodeDrop() throws Exception { CreatePITResponse pitResponse = createPitOnIndex("index"); createIndex("index1", Settings.builder().put("index.number_of_shards", 5).put("index.number_of_replicas", 1).build()); From 68e210dbb07091e1061184818b207bdca1c6f85d Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Wed, 4 May 2022 09:45:24 +0530 Subject: [PATCH 08/19] Adding java docs and addressing comments Signed-off-by: Bharathwaj G --- .../action/search/DeletePITAction.java | 3 + .../action/search/DeletePITResponse.java | 3 + .../action/search/RestDeletePITAction.java | 30 +++++---- .../search/TransportDeletePITActionTests.java | 3 + .../search/DeletePitMultiNodeTests.java | 3 + .../search/pit/RestDeletePitActionTests.java | 66 ++++++++++++++++--- 6 files changed, 87 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITAction.java b/server/src/main/java/org/opensearch/action/search/DeletePITAction.java index 7f043a365c403..6048996037bbe 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePITAction.java @@ -10,6 +10,9 @@ import org.opensearch.action.ActionType; +/** + * Action type for deleting PIT reader contexts + */ public class DeletePITAction extends ActionType { public static final DeletePITAction INSTANCE = new DeletePITAction(); diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java b/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java index 220f5377bc1ce..5c3f66b0ad293 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java @@ -26,6 +26,9 @@ import static org.opensearch.rest.RestStatus.NOT_FOUND; import static org.opensearch.rest.RestStatus.OK; +/** + * Response class for the delete pit flow which indicates whether the contexts were freed + */ public class DeletePITResponse extends ActionResponse implements StatusToXContentObject { /** diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java index 26739d3749f92..0c4fb28773621 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java @@ -11,7 +11,6 @@ import org.opensearch.action.search.DeletePITRequest; import org.opensearch.action.search.DeletePITResponse; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.Strings; import org.opensearch.rest.BaseRestHandler; import 
org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestStatusToXContentListener; @@ -23,6 +22,9 @@ import static java.util.Collections.unmodifiableList; import static org.opensearch.rest.RestRequest.Method.DELETE; +/** + * Rest action for deleting PIT contexts + */ public class RestDeletePITAction extends BaseRestHandler { @Override @@ -32,24 +34,26 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - String pitIds = request.param("pit_id"); + String allPitIdsQualifier = "_all"; DeletePITRequest deletePITRequest = new DeletePITRequest(); - deletePITRequest.setPitIds(asList(Strings.splitStringByCommaToArray(pitIds))); - request.withContentOrSourceParamParserOrNull((xContentParser -> { - if (xContentParser != null) { - // NOTE: if rest request with xcontent body has request parameters, values parsed from request body have the precedence - try { - deletePITRequest.fromXContent(xContentParser); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to parse request body", e); + if (request.path().contains(allPitIdsQualifier)) { + deletePITRequest.setPitIds(asList(allPitIdsQualifier)); + } else { + request.withContentOrSourceParamParserOrNull((xContentParser -> { + if (xContentParser != null) { + try { + deletePITRequest.fromXContent(xContentParser); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to parse request body", e); + } } - } - })); + })); + } return channel -> client.deletePit(deletePITRequest, new RestStatusToXContentListener(channel)); } @Override public List routes() { - return unmodifiableList(asList(new Route(DELETE, "/_search/_point_in_time"), new Route(DELETE, "/_search/_point_in_time/{id}"))); + return unmodifiableList(asList(new Route(DELETE, "/_search/point_in_time"), new Route(DELETE, "/_search/point_in_time/_all"))); } } diff --git a/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java index 347c5a11630de..abe2f55917969 100644 --- a/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java @@ -51,6 +51,9 @@ import static org.mockito.Mockito.when; import static org.opensearch.action.support.PlainActionFuture.newFuture; +/** + * Functional tests for transport delete pit action + */ public class TransportDeletePITActionTests extends OpenSearchTestCase { DiscoveryNode node1 = null; diff --git a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java index 9bfb3a76220bc..2836594fa4d37 100644 --- a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java @@ -31,6 +31,9 @@ import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +/** + * Multi node integration tests for delete PIT use cases + */ @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) public class DeletePitMultiNodeTests extends OpenSearchIntegTestCase { diff --git a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java index 1798dcdf100df..3c59fe259074a 100644 --- 
a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java +++ b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java @@ -27,6 +27,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +/** + * Tests to verify the behavior of rest delete pit action for list delete and delete all PIT endpoints + */ public class RestDeletePitActionTests extends OpenSearchTestCase { public void testParseDeletePitRequestWithInvalidJsonThrowsException() throws Exception { RestDeletePITAction action = new RestDeletePITAction(); @@ -38,7 +41,7 @@ public void testParseDeletePitRequestWithInvalidJsonThrowsException() throws Exc assertThat(e.getMessage(), equalTo("Failed to parse request body")); } - public void testBodyParamsOverrideQueryStringParams() throws Exception { + public void testDeletePitWithBody() throws Exception { SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { @Override @@ -49,9 +52,29 @@ public void deletePit(DeletePITRequest request, ActionListener pitCalled = new SetOnce<>(); + try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { + @Override + public void deletePit(DeletePITRequest request, ActionListener listener) { + pitCalled.set(true); + assertThat(request.getPitIds(), hasSize(1)); + assertThat(request.getPitIds().get(0), equalTo("_all")); + } + }) { + RestDeletePITAction action = new RestDeletePITAction(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_all").build(); FakeRestChannel channel = new FakeRestChannel(request, false, 0); action.handleRequest(request, channel, nodeClient); @@ -59,7 +82,32 @@ public void deletePit(DeletePITRequest request, ActionListener pitCalled = new SetOnce<>(); + try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { + @Override + public void deletePit(DeletePITRequest request, ActionListener listener) { + pitCalled.set(true); + assertThat(request.getPitIds(), hasSize(1)); + assertThat(request.getPitIds().get(0), equalTo("_all")); + } + }) { + RestDeletePITAction action = new RestDeletePITAction(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray("{\"pit_id\": [\"BODY\"]}"), + XContentType.JSON + ).withPath("/_all").build(); + FakeRestChannel channel = new FakeRestChannel(request, false, 0); + + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> action.handleRequest(request, channel, nodeClient) + ); + assertTrue(ex.getMessage().contains("request [GET /_all] does not support having a body")); + } + } + + public void testDeletePitQueryStringParamsShouldThrowException() { SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { @Override @@ -75,9 +123,11 @@ public void deletePit(DeletePITRequest request, ActionListener action.handleRequest(request, channel, nodeClient) + ); + assertTrue(ex.getMessage().contains("unrecognized param")); } } } From 2acb46591d83c5cb19f0578e364945f60a9fe8e0 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Fri, 6 May 2022 13:39:23 +0530 Subject: [PATCH 09/19] changes to uniformly name pit Signed-off-by: Bharathwaj G --- .../org/opensearch/action/ActionModule.java | 10 +-- ...etePITAction.java => DeletePitAction.java} | 8 +-- ...ePITRequest.java => DeletePitRequest.java} | 10 +-- ...ITResponse.java => DeletePitResponse.java} | 14 ++-- .../action/search/SearchTransportService.java | 10 +-- 
...ion.java => TransportDeletePitAction.java} | 22 +++--- .../java/org/opensearch/client/Client.java | 6 +- .../client/support/AbstractClient.java | 10 +-- ...ITAction.java => RestDeletePitAction.java} | 10 +-- ...ava => TransportDeletePitActionTests.java} | 68 +++++++++---------- .../search/DeletePitMultiNodeTests.java | 46 ++++++------- .../search/pit/RestDeletePitActionTests.java | 24 +++---- 12 files changed, 119 insertions(+), 119 deletions(-) rename server/src/main/java/org/opensearch/action/search/{DeletePITAction.java => DeletePitAction.java} (67%) rename server/src/main/java/org/opensearch/action/search/{DeletePITRequest.java => DeletePitRequest.java} (94%) rename server/src/main/java/org/opensearch/action/search/{DeletePITResponse.java => DeletePitResponse.java} (86%) rename server/src/main/java/org/opensearch/action/search/{TransportDeletePITAction.java => TransportDeletePitAction.java} (90%) rename server/src/main/java/org/opensearch/rest/action/search/{RestDeletePITAction.java => RestDeletePitAction.java} (85%) rename server/src/test/java/org/opensearch/action/search/{TransportDeletePITActionTests.java => TransportDeletePitActionTests.java} (92%) diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java index b659d42acac50..575357adfa11d 100644 --- a/server/src/main/java/org/opensearch/action/ActionModule.java +++ b/server/src/main/java/org/opensearch/action/ActionModule.java @@ -237,13 +237,13 @@ import org.opensearch.action.main.TransportMainAction; import org.opensearch.action.search.ClearScrollAction; import org.opensearch.action.search.CreatePitAction; -import org.opensearch.action.search.DeletePITAction; +import org.opensearch.action.search.DeletePitAction; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchScrollAction; import org.opensearch.action.search.TransportClearScrollAction; import org.opensearch.action.search.TransportCreatePitAction; -import org.opensearch.action.search.TransportDeletePITAction; +import org.opensearch.action.search.TransportDeletePitAction; import org.opensearch.action.search.TransportMultiSearchAction; import org.opensearch.action.search.TransportSearchAction; import org.opensearch.action.search.TransportSearchScrollAction; @@ -405,7 +405,7 @@ import org.opensearch.rest.action.search.RestClearScrollAction; import org.opensearch.rest.action.search.RestCountAction; import org.opensearch.rest.action.search.RestCreatePitAction; -import org.opensearch.rest.action.search.RestDeletePITAction; +import org.opensearch.rest.action.search.RestDeletePitAction; import org.opensearch.rest.action.search.RestExplainAction; import org.opensearch.rest.action.search.RestMultiSearchAction; import org.opensearch.rest.action.search.RestSearchAction; @@ -667,7 +667,7 @@ public void reg actions.register(DeleteDanglingIndexAction.INSTANCE, TransportDeleteDanglingIndexAction.class); actions.register(FindDanglingIndexAction.INSTANCE, TransportFindDanglingIndexAction.class); actions.register(CreatePitAction.INSTANCE, TransportCreatePitAction.class); - actions.register(DeletePITAction.INSTANCE, TransportDeletePITAction.class); + actions.register(DeletePitAction.INSTANCE, TransportDeletePitAction.class); return unmodifiableMap(actions.getRegistry()); } @@ -843,7 +843,7 @@ public void initRestHandlers(Supplier nodesInCluster) { // Point in time API registerHandler.accept(new RestCreatePitAction()); - 
registerHandler.accept(new RestDeletePITAction()); + registerHandler.accept(new RestDeletePitAction()); for (ActionPlugin plugin : actionPlugins) { for (RestHandler handler : plugin.getRestHandlers( settings, diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITAction.java b/server/src/main/java/org/opensearch/action/search/DeletePitAction.java similarity index 67% rename from server/src/main/java/org/opensearch/action/search/DeletePITAction.java rename to server/src/main/java/org/opensearch/action/search/DeletePitAction.java index 6048996037bbe..564f0ec6f1e3c 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePitAction.java @@ -13,12 +13,12 @@ /** * Action type for deleting PIT reader contexts */ -public class DeletePITAction extends ActionType { +public class DeletePitAction extends ActionType { - public static final DeletePITAction INSTANCE = new DeletePITAction(); + public static final DeletePitAction INSTANCE = new DeletePitAction(); public static final String NAME = "indices:admin/delete/pit"; - private DeletePITAction() { - super(NAME, DeletePITResponse::new); + private DeletePitAction() { + super(NAME, DeletePitResponse::new); } } diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java b/server/src/main/java/org/opensearch/action/search/DeletePitRequest.java similarity index 94% rename from server/src/main/java/org/opensearch/action/search/DeletePITRequest.java rename to server/src/main/java/org/opensearch/action/search/DeletePitRequest.java index 04b3aeb0f6a07..88f6f1daf3eaf 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITRequest.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePitRequest.java @@ -27,31 +27,31 @@ /** * Request to delete one or more PIT contexts based on IDs. */ -public class DeletePITRequest extends ActionRequest implements ToXContentObject { +public class DeletePitRequest extends ActionRequest implements ToXContentObject { /** * List of PIT IDs to be deleted , and use "_all" to delete all PIT reader contexts */ private List pitIds; - public DeletePITRequest(StreamInput in) throws IOException { + public DeletePitRequest(StreamInput in) throws IOException { super(in); pitIds = Arrays.asList(in.readStringArray()); } - public DeletePITRequest(String... pitIds) { + public DeletePitRequest(String... 
pitIds) { if (pitIds != null) { this.pitIds = Arrays.asList(pitIds); } } - public DeletePITRequest(List pitIds) { + public DeletePitRequest(List pitIds) { if (pitIds != null) { this.pitIds = pitIds; } } - public DeletePITRequest() {} + public DeletePitRequest() {} public List getPitIds() { return pitIds; diff --git a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java b/server/src/main/java/org/opensearch/action/search/DeletePitResponse.java similarity index 86% rename from server/src/main/java/org/opensearch/action/search/DeletePITResponse.java rename to server/src/main/java/org/opensearch/action/search/DeletePitResponse.java index 5c3f66b0ad293..44fef162af623 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePITResponse.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePitResponse.java @@ -29,18 +29,18 @@ /** * Response class for delete pit flow which returns if the contexts are freed */ -public class DeletePITResponse extends ActionResponse implements StatusToXContentObject { +public class DeletePitResponse extends ActionResponse implements StatusToXContentObject { /** * This will be true if all PIT reader contexts are deleted. */ private final boolean succeeded; - public DeletePITResponse(boolean succeeded) { + public DeletePitResponse(boolean succeeded) { this.succeeded = succeeded; } - public DeletePITResponse(StreamInput in) throws IOException { + public DeletePitResponse(StreamInput in) throws IOException { super(in); succeeded = in.readBoolean(); } @@ -64,10 +64,10 @@ public void writeTo(StreamOutput out) throws IOException { private static final ParseField SUCCEEDED = new ParseField("succeeded"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "delete_pit", true, - a -> new DeletePITResponse((boolean) a[0]) + a -> new DeletePitResponse((boolean) a[0]) ); static { PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), SUCCEEDED, ObjectParser.ValueType.BOOLEAN); @@ -82,9 +82,9 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par } /** - * Parse the delete PIT response body into a new {@link DeletePITResponse} object + * Parse the delete PIT response body into a new {@link DeletePitResponse} object */ - public static DeletePITResponse fromXContent(XContentParser parser) throws IOException { + public static DeletePitResponse fromXContent(XContentParser parser) throws IOException { return PARSER.apply(parser, null); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java index 454068f838ee9..6c11fe95cfbe0 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java @@ -208,7 +208,7 @@ public void sendFreePITContext( transportService.sendRequest( connection, FREE_PIT_CONTEXT_ACTION_NAME, - new PITFreeContextRequest(contextId), + new PitFreeContextRequest(contextId), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new) ); @@ -389,14 +389,14 @@ public ShardSearchContextId id() { } - static class PITFreeContextRequest extends TransportRequest { + static class PitFreeContextRequest extends TransportRequest { private ShardSearchContextId contextId; - 
PITFreeContextRequest(ShardSearchContextId contextId) { + PitFreeContextRequest(ShardSearchContextId contextId) { this.contextId = Objects.requireNonNull(contextId); } - PITFreeContextRequest(StreamInput in) throws IOException { + PitFreeContextRequest(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); } @@ -491,7 +491,7 @@ public static void registerRequestHandler(TransportService transportService, Sea transportService.registerRequestHandler( FREE_PIT_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, - PITFreeContextRequest::new, + PitFreeContextRequest::new, (request, channel, task) -> { boolean freed = searchService.freeReaderContextIfFound(request.id()); channel.sendResponse(new SearchFreeContextResponse(freed)); diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java similarity index 90% rename from server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java rename to server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java index 5785148913337..44d44fe4df939 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportDeletePITAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java @@ -37,15 +37,15 @@ /** * Transport action for deleting pit reader context - supports deleting list and all pit contexts */ -public class TransportDeletePITAction extends HandledTransportAction { +public class TransportDeletePitAction extends HandledTransportAction { private final NamedWriteableRegistry namedWriteableRegistry; private TransportSearchAction transportSearchAction; private final ClusterService clusterService; private final SearchTransportService searchTransportService; - private static final Logger logger = LogManager.getLogger(TransportDeletePITAction.class); + private static final Logger logger = LogManager.getLogger(TransportDeletePitAction.class); @Inject - public TransportDeletePITAction( + public TransportDeletePitAction( TransportService transportService, ActionFilters actionFilters, NamedWriteableRegistry namedWriteableRegistry, @@ -53,7 +53,7 @@ public TransportDeletePITAction( ClusterService clusterService, SearchTransportService searchTransportService ) { - super(DeletePITAction.NAME, transportService, actionFilters, DeletePITRequest::new); + super(DeletePitAction.NAME, transportService, actionFilters, DeletePitRequest::new); this.namedWriteableRegistry = namedWriteableRegistry; this.transportSearchAction = transportSearchAction; this.clusterService = clusterService; @@ -61,7 +61,7 @@ public TransportDeletePITAction( } @Override - protected void doExecute(Task task, DeletePITRequest request, ActionListener listener) { + protected void doExecute(Task task, DeletePitRequest request, ActionListener listener) { List contexts = new ArrayList<>(); List pitIds = request.getPitIds(); if (pitIds.size() == 1 && "_all".equals(pitIds.get(0))) { @@ -73,16 +73,16 @@ protected void doExecute(Task task, DeletePITRequest request, ActionListener { if (r == contexts.size()) { - listener.onResponse(new DeletePITResponse(true)); + listener.onResponse(new DeletePitResponse(true)); } else { logger.debug( () -> new ParameterizedMessage("Delete PITs failed. 
" + "Cleared {} contexts out of {}", r, contexts.size()) ); - listener.onResponse(new DeletePITResponse(false)); + listener.onResponse(new DeletePitResponse(false)); } }, e -> { logger.debug("Delete PITs failed ", e); - listener.onResponse(new DeletePITResponse(false)); + listener.onResponse(new DeletePitResponse(false)); })); } } @@ -90,7 +90,7 @@ protected void doExecute(Task task, DeletePITRequest request, ActionListener listener) { + void deleteAllPits(ActionListener listener) { int size = clusterService.state().getNodes().getSize(); ActionListener groupedActionListener = new GroupedActionListener( new ActionListener<>() { @@ -104,13 +104,13 @@ public void onResponse(final Collection listener); - void deletePit(DeletePITRequest deletePITRequest, ActionListener listener); + void deletePit(DeletePitRequest deletePITRequest, ActionListener listener); /** * Performs multiple search requests. diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 5f597315eae45..bcebd1a6e6ae3 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -334,9 +334,9 @@ import org.opensearch.action.search.CreatePitAction; import org.opensearch.action.search.CreatePitRequest; import org.opensearch.action.search.CreatePitResponse; -import org.opensearch.action.search.DeletePITAction; -import org.opensearch.action.search.DeletePITRequest; -import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.action.search.DeletePitAction; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchRequestBuilder; @@ -588,8 +588,8 @@ public void createPit(final CreatePitRequest createPITRequest, final ActionListe } @Override - public void deletePit(final DeletePITRequest deletePITRequest, final ActionListener listener) { - execute(DeletePITAction.INSTANCE, deletePITRequest, listener); + public void deletePit(final DeletePitRequest deletePITRequest, final ActionListener listener) { + execute(DeletePitAction.INSTANCE, deletePITRequest, listener); } @Override diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePitAction.java similarity index 85% rename from server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java rename to server/src/main/java/org/opensearch/rest/action/search/RestDeletePitAction.java index 0c4fb28773621..aecea031f9e71 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestDeletePITAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestDeletePitAction.java @@ -8,8 +8,8 @@ package org.opensearch.rest.action.search; -import org.opensearch.action.search.DeletePITRequest; -import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -25,7 +25,7 @@ /** * Rest action for deleting PIT contexts */ -public class RestDeletePITAction extends BaseRestHandler { +public class 
RestDeletePitAction extends BaseRestHandler { @Override public String getName() { @@ -35,7 +35,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { String allPitIdsQualifier = "_all"; - DeletePITRequest deletePITRequest = new DeletePITRequest(); + DeletePitRequest deletePITRequest = new DeletePitRequest(); if (request.path().contains(allPitIdsQualifier)) { deletePITRequest.setPitIds(asList(allPitIdsQualifier)); } else { @@ -49,7 +49,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client } })); } - return channel -> client.deletePit(deletePITRequest, new RestStatusToXContentListener(channel)); + return channel -> client.deletePit(deletePITRequest, new RestStatusToXContentListener(channel)); } @Override diff --git a/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java similarity index 92% rename from server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java rename to server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java index abe2f55917969..32c1094a9cce5 100644 --- a/server/src/test/java/org/opensearch/action/search/TransportDeletePITActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java @@ -54,7 +54,7 @@ /** * Functional tests for transport delete pit action */ -public class TransportDeletePITActionTests extends OpenSearchTestCase { +public class TransportDeletePitActionTests extends OpenSearchTestCase { DiscoveryNode node1 = null; DiscoveryNode node2 = null; @@ -103,7 +103,7 @@ public void setupData() { ClusterState state = mock(ClusterState.class); final Settings keepAliveSettings = Settings.builder() - .put(CreatePITController.CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.getKey(), 30000) + .put(CreatePitController.CREATE_PIT_TEMPORARY_KEEPALIVE_SETTING.getKey(), 30000) .build(); when(clusterServiceMock.getSettings()).thenReturn(keepAliveSettings); @@ -156,7 +156,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -164,10 +164,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(true, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -209,7 +209,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -217,10 +217,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); 
action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(true, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -273,7 +273,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -281,10 +281,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(false, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -331,7 +331,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -339,10 +339,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(false, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -395,7 +395,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -403,10 +403,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitId); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(false, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -453,7 +453,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -461,10 +461,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(false, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -507,7 +507,7 @@ public TaskManager getTaskManager() { return 
taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -515,10 +515,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(false, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { @@ -565,7 +565,7 @@ public TaskManager getTaskManager() { return taskManager; } }; - TransportDeletePITAction action = new TransportDeletePITAction( + TransportDeletePitAction action = new TransportDeletePitAction( transportService, actionFilters, namedWriteableRegistry, @@ -573,10 +573,10 @@ public TaskManager getTaskManager() { clusterServiceMock, searchTransportService ); - DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); - PlainActionFuture future = newFuture(); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); action.execute(task, deletePITRequest, future); - DeletePITResponse dr = future.get(); + DeletePitResponse dr = future.get(); assertEquals(false, dr.isSucceeded()); assertEquals(3, deleteNodesInvoked.size()); } finally { diff --git a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java index 2836594fa4d37..b0fc991fe4ecb 100644 --- a/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java +++ b/server/src/test/java/org/opensearch/search/DeletePitMultiNodeTests.java @@ -14,9 +14,9 @@ import org.opensearch.action.search.CreatePITAction; import org.opensearch.action.search.CreatePITRequest; import org.opensearch.action.search.CreatePITResponse; -import org.opensearch.action.search.DeletePITAction; -import org.opensearch.action.search.DeletePITRequest; -import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.action.search.DeletePitAction; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.search.builder.PointInTimeBuilder; @@ -66,14 +66,14 @@ public void testDeletePit() throws Exception { execute = client().execute(CreatePITAction.INSTANCE, request); pitResponse = execute.get(); pitIds.add(pitResponse.getId()); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); - ActionFuture deleteExecute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = deleteExecute.get(); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitIds); + ActionFuture deleteExecute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = deleteExecute.get(); assertTrue(deletePITResponse.isSucceeded()); /** * Checking deleting the same PIT id again results in succeeded */ - deleteExecute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); + deleteExecute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); deletePITResponse = deleteExecute.get(); 
assertTrue(deletePITResponse.isSucceeded()); @@ -85,14 +85,14 @@ public void testDeleteAllPits() throws Exception { client().prepareIndex("index1").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); ensureGreen(); createPitOnIndex("index1"); - DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); /** * When we invoke delete again, returns success after clearing the remaining readers. Asserting reader context * not found exceptions don't result in failures ( as deletion in one node is successful ) */ - ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = execute.get(); + ActionFuture execute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = execute.get(); assertTrue(deletePITResponse.isSucceeded()); client().admin().indices().prepareDelete("index1").get(); } @@ -106,12 +106,12 @@ public void testDeletePitWhileNodeDrop() throws Exception { pitIds.add(pitResponse.getId()); CreatePITResponse pitResponse1 = createPitOnIndex("index1"); pitIds.add(pitResponse1.getId()); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitIds); internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { @Override public Settings onNodeStopped(String nodeName) throws Exception { - ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = execute.get(); + ActionFuture execute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = execute.get(); assertFalse(deletePITResponse.isSucceeded()); return super.onNodeStopped(nodeName); } @@ -122,8 +122,8 @@ public Settings onNodeStopped(String nodeName) throws Exception { * When we invoke delete again, returns success after clearing the remaining readers. 
Asserting reader context * not found exceptions don't result in failures ( as deletion in one node is successful ) */ - ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = execute.get(); + ActionFuture execute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = execute.get(); assertTrue(deletePITResponse.isSucceeded()); client().admin().indices().prepareDelete("index1").get(); } @@ -133,12 +133,12 @@ public void testDeleteAllPitsWhileNodeDrop() throws Exception { createIndex("index1", Settings.builder().put("index.number_of_shards", 5).put("index.number_of_replicas", 1).build()); client().prepareIndex("index1").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); ensureGreen(); - DeletePITRequest deletePITRequest = new DeletePITRequest("_all"); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { @Override public Settings onNodeStopped(String nodeName) throws Exception { - ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = execute.get(); + ActionFuture execute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = execute.get(); assertFalse(deletePITResponse.isSucceeded()); return super.onNodeStopped(nodeName); } @@ -149,8 +149,8 @@ public Settings onNodeStopped(String nodeName) throws Exception { * When we invoke delete again, returns success after clearing the remaining readers. Asserting reader context * not found exceptions don't result in failures ( as deletion in one node is successful ) */ - ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = execute.get(); + ActionFuture execute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = execute.get(); assertTrue(deletePITResponse.isSucceeded()); client().admin().indices().prepareDelete("index1").get(); } @@ -160,7 +160,7 @@ public void testDeleteWhileSearch() throws Exception { ensureGreen(); List pitIds = new ArrayList<>(); pitIds.add(pitResponse.getId()); - DeletePITRequest deletePITRequest = new DeletePITRequest(pitIds); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitIds); Thread[] threads = new Thread[5]; CountDownLatch latch = new CountDownLatch(threads.length); final AtomicBoolean deleted = new AtomicBoolean(false); @@ -191,8 +191,8 @@ public void testDeleteWhileSearch() throws Exception { threads[i].setName("opensearch[node_s_0][search]"); threads[i].start(); } - ActionFuture execute = client().execute(DeletePITAction.INSTANCE, deletePITRequest); - DeletePITResponse deletePITResponse = execute.get(); + ActionFuture execute = client().execute(DeletePitAction.INSTANCE, deletePITRequest); + DeletePitResponse deletePITResponse = execute.get(); deleted.set(true); assertTrue(deletePITResponse.isSucceeded()); diff --git a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java index 3c59fe259074a..f73935ee1af92 100644 --- a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java +++ b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java @@ -10,13 +10,13 @@ import 
org.apache.lucene.util.SetOnce; import org.opensearch.action.ActionListener; -import org.opensearch.action.search.DeletePITRequest; -import org.opensearch.action.search.DeletePITResponse; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.xcontent.XContentType; import org.opensearch.rest.RestRequest; -import org.opensearch.rest.action.search.RestDeletePITAction; +import org.opensearch.rest.action.search.RestDeletePitAction; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.client.NoOpNodeClient; import org.opensearch.test.rest.FakeRestChannel; @@ -32,7 +32,7 @@ */ public class RestDeletePitActionTests extends OpenSearchTestCase { public void testParseDeletePitRequestWithInvalidJsonThrowsException() throws Exception { - RestDeletePITAction action = new RestDeletePITAction(); + RestDeletePitAction action = new RestDeletePitAction(); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( new BytesArray("{invalid_json}"), XContentType.JSON @@ -45,13 +45,13 @@ public void testDeletePitWithBody() throws Exception { SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { @Override - public void deletePit(DeletePITRequest request, ActionListener listener) { + public void deletePit(DeletePitRequest request, ActionListener listener) { pitCalled.set(true); assertThat(request.getPitIds(), hasSize(1)); assertThat(request.getPitIds().get(0), equalTo("BODY")); } }) { - RestDeletePITAction action = new RestDeletePITAction(); + RestDeletePitAction action = new RestDeletePitAction(); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( new BytesArray("{\"pit_id\": [\"BODY\"]}"), XContentType.JSON @@ -67,13 +67,13 @@ public void testDeleteAllPit() throws Exception { SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { @Override - public void deletePit(DeletePITRequest request, ActionListener listener) { + public void deletePit(DeletePitRequest request, ActionListener listener) { pitCalled.set(true); assertThat(request.getPitIds(), hasSize(1)); assertThat(request.getPitIds().get(0), equalTo("_all")); } }) { - RestDeletePITAction action = new RestDeletePITAction(); + RestDeletePitAction action = new RestDeletePitAction(); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_all").build(); FakeRestChannel channel = new FakeRestChannel(request, false, 0); action.handleRequest(request, channel, nodeClient); @@ -86,13 +86,13 @@ public void testDeleteAllPitWithBody() throws Exception { SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { @Override - public void deletePit(DeletePITRequest request, ActionListener listener) { + public void deletePit(DeletePitRequest request, ActionListener listener) { pitCalled.set(true); assertThat(request.getPitIds(), hasSize(1)); assertThat(request.getPitIds().get(0), equalTo("_all")); } }) { - RestDeletePITAction action = new RestDeletePITAction(); + RestDeletePitAction action = new RestDeletePitAction(); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( new BytesArray("{\"pit_id\": [\"BODY\"]}"), XContentType.JSON @@ -111,14 +111,14 @@ public void testDeletePitQueryStringParamsShouldThrowException() { 
SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { @Override - public void deletePit(DeletePITRequest request, ActionListener listener) { + public void deletePit(DeletePitRequest request, ActionListener listener) { pitCalled.set(true); assertThat(request.getPitIds(), hasSize(2)); assertThat(request.getPitIds().get(0), equalTo("QUERY_STRING")); assertThat(request.getPitIds().get(1), equalTo("QUERY_STRING_1")); } }) { - RestDeletePITAction action = new RestDeletePITAction(); + RestDeletePitAction action = new RestDeletePitAction(); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams( Collections.singletonMap("pit_id", "QUERY_STRING,QUERY_STRING_1") ).build(); From 648402e2a7d16131b918111098fae099811db83d Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Tue, 10 May 2022 23:04:32 +0530 Subject: [PATCH 10/19] Addressing comments Signed-off-by: Bharathwaj G --- .../action/search/CreatePitController.java | 11 +++++++++-- .../opensearch/search/internal/PitReaderContext.java | 6 +++--- .../opensearch/search/internal/ReaderContext.java | 12 ++++++++++-- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitController.java b/server/src/main/java/org/opensearch/action/search/CreatePitController.java index 68f465c377154..1e74ab1922d8c 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePitController.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePitController.java @@ -126,7 +126,9 @@ public void executeCreatePit(StepListener createPitListener, Act * Creates PIT reader context with temporary keep alive */ void executeCreatePit(Task task, SearchRequest searchRequest, StepListener createPitListener) { - logger.debug("Executing creation of PIT context for indices [{}]", Arrays.toString(searchRequest.indices())); + logger.debug( + () -> new ParameterizedMessage("Executing creation of PIT context for indices [{}]", Arrays.toString(searchRequest.indices())) + ); transportSearchAction.executeRequest( task, searchRequest, @@ -160,7 +162,12 @@ void executeUpdatePitId( SearchResponse searchResponse, ActionListener updatePitIdListener ) { - logger.debug("Updating PIT context with PIT ID [{}], creation time and keep alive", searchResponse.pointInTimeId()); + logger.debug( + () -> new ParameterizedMessage( + "Updating PIT context with PIT ID [{}], creation time and keep alive", + searchResponse.pointInTimeId() + ) + ); /** * store the create time ( same create time for all PIT contexts across shards ) to be used * for list PIT api diff --git a/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java b/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java index 5c19d54327fea..43ca7e0ebd823 100644 --- a/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java +++ b/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java @@ -50,13 +50,13 @@ public void setPitId(final String pitId) { * keepAliveInMillis. 
*/ public Releasable updatePitIdAndKeepAlive(long keepAliveInMillis, String pitId, long createTime) { - refCounted.incRef(); + getRefCounted().incRef(); tryUpdateKeepAlive(keepAliveInMillis); setPitId(pitId); setCreationTime(createTime); return Releasables.releaseOnce(() -> { - this.lastAccessTime.updateAndGet(curr -> Math.max(curr, nowInMillis())); - refCounted.decRef(); + getLastAccessTime().updateAndGet(curr -> Math.max(curr, nowInMillis())); + getRefCounted().decRef(); }); } diff --git a/server/src/main/java/org/opensearch/search/internal/ReaderContext.java b/server/src/main/java/org/opensearch/search/internal/ReaderContext.java index 6342051ce4405..7a7ee27d36d56 100644 --- a/server/src/main/java/org/opensearch/search/internal/ReaderContext.java +++ b/server/src/main/java/org/opensearch/search/internal/ReaderContext.java @@ -66,9 +66,9 @@ public class ReaderContext implements Releasable { private final boolean singleSession; private final AtomicLong keepAlive; - protected final AtomicLong lastAccessTime; + private final AtomicLong lastAccessTime; // For reference why we use RefCounted here see https://github.com/elastic/elasticsearch/pull/20095. - protected final AbstractRefCounted refCounted; + private final AbstractRefCounted refCounted; private final List onCloses = new CopyOnWriteArrayList<>(); @@ -107,6 +107,14 @@ protected long nowInMillis() { return indexShard.getThreadPool().relativeTimeInMillis(); } + protected AbstractRefCounted getRefCounted() { + return refCounted; + } + + protected AtomicLong getLastAccessTime() { + return lastAccessTime; + } + @Override public final void close() { if (closed.compareAndSet(false, true)) { From 04532f4ba6ccf28c24ab8f10d92bdf702f1b487c Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Wed, 11 May 2022 15:57:10 +0530 Subject: [PATCH 11/19] adding tests and comments Signed-off-by: Bharathwaj G --- .../action/search/DeletePitAction.java | 2 +- .../action/search/SearchTransportService.java | 3 +++ .../search/TransportDeletePitAction.java | 23 +++++++++++-------- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/search/DeletePitAction.java b/server/src/main/java/org/opensearch/action/search/DeletePitAction.java index 564f0ec6f1e3c..2774fb5ef7648 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePitAction.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePitAction.java @@ -16,7 +16,7 @@ public class DeletePitAction extends ActionType { public static final DeletePitAction INSTANCE = new DeletePitAction(); - public static final String NAME = "indices:admin/delete/pit"; + public static final String NAME = "indices:admin/read/pit/delete"; private DeletePitAction() { super(NAME, DeletePitResponse::new); diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java index 6c11fe95cfbe0..ee8b51c73ef01 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java @@ -389,6 +389,9 @@ public ShardSearchContextId id() { } + /** + * Request to free the PIT context based on id + */ static class PitFreeContextRequest extends TransportRequest { private ShardSearchContextId contextId; diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java 
b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java index 44d44fe4df939..731e776e52191 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java @@ -60,6 +60,9 @@ public TransportDeletePitAction( this.searchTransportService = searchTransportService; } + /** + * Invoke delete all pits or delete list of pits workflow based on request + */ @Override protected void doExecute(Task task, DeletePitRequest request, ActionListener listener) { List contexts = new ArrayList<>(); @@ -96,15 +99,17 @@ void deleteAllPits(ActionListener listener) { new ActionListener<>() { @Override public void onResponse(final Collection responses) { - final SetOnce succeeded = new SetOnce<>(); - for (SearchTransportService.SearchFreeContextResponse response : responses) { - if (!response.isFreed()) { - succeeded.set(false); - break; - } - } - succeeded.trySet(true); - listener.onResponse(new DeletePitResponse(succeeded.get())); + //final SetOnce succeeded = new SetOnce<>(); + boolean hasFailures = responses.stream().anyMatch(r-> !r.isFreed()); + listener.onResponse(new DeletePitResponse(!hasFailures)); +// for (SearchTransportService.SearchFreeContextResponse response : responses) { +// if (!response.isFreed()) { +// succeeded.set(false); +// break; +// } +// } +// succeeded.trySet(true); +// listener.onResponse(new DeletePitResponse(succeeded.get())); } @Override From 6ceaf615e220f821ddc3e36bb41b82c80e595fe7 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Fri, 13 May 2022 11:50:29 +0530 Subject: [PATCH 12/19] addressing comments Signed-off-by: Bharathwaj G --- .../opensearch/client/RequestConverters.java | 2 +- .../action/search/CreatePitController.java | 23 +++++----- .../opensearch/action/search/SearchUtils.java | 42 +++++++++++++++++++ .../org/opensearch/search/SearchService.java | 18 +++----- .../search/CreatePitControllerTests.java | 3 +- 5 files changed, 60 insertions(+), 28 deletions(-) create mode 100644 server/src/main/java/org/opensearch/action/search/SearchUtils.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index 47bb6630c08db..277759c921fbf 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -439,7 +439,7 @@ static void addSearchRequestParams(Params params, SearchRequest searchRequest) { params.withIndicesOptions(searchRequest.indicesOptions()); } params.withSearchType(searchRequest.searchType().name().toLowerCase(Locale.ROOT)); - if (searchRequest.pointInTimeBuilder() == null) { + if (searchRequest.pointInTimeBuilder() != null) { params.putParam("ccs_minimize_roundtrips", "false"); } else { params.putParam("ccs_minimize_roundtrips", Boolean.toString(searchRequest.isCcsMinimizeRoundtrips())); diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitController.java b/server/src/main/java/org/opensearch/action/search/CreatePitController.java index 5b20bbf2696db..4ed2e5b3de0a3 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePitController.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePitController.java @@ -103,7 +103,7 @@ public void executeCreatePit(StepListener createPitListener, Act * Phase 2 of create PIT where we update pit id in pit 
contexts */ createPitListener.whenComplete( - searchResponse -> { executeUpdatePitId(request, searchResponse, updatePitIdListener); }, + searchResponse -> { executeUpdatePitId(request, searchRequest, searchResponse, updatePitIdListener); }, updatePitIdListener::onFailure ); } @@ -148,6 +148,7 @@ public void executeOnShardTarget( */ void executeUpdatePitId( CreatePitRequest request, + SearchRequest searchRequest, SearchResponse searchResponse, ActionListener updatePitIdListener ) { @@ -161,7 +162,13 @@ void executeUpdatePitId( * store the create time ( same create time for all PIT contexts across shards ) to be used * for list PIT api */ - final long creationTime = System.currentTimeMillis(); + final long relativeStartNanos = System.nanoTime(); + final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider( + searchRequest.getOrCreateAbsoluteStartMillis(), + relativeStartNanos, + System::nanoTime + ); + final long creationTime = timeProvider.getAbsoluteStartMillis(); CreatePitResponse createPITResponse = new CreatePitResponse( searchResponse.pointInTimeId(), creationTime, @@ -212,27 +219,17 @@ void executeUpdatePitId( } } }, updatePitIdListener::onFailure); - } private StepListener> getConnectionLookupListener(SearchContextId contextId) { ClusterState state = clusterService.state(); - final Set clusters = contextId.shards() .values() .stream() .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) .map(SearchContextIdForNode::getClusterAlias) .collect(Collectors.toSet()); - - final StepListener> lookupListener = new StepListener<>(); - - if (clusters.isEmpty()) { - lookupListener.onResponse((cluster, nodeId) -> state.getNodes().get(nodeId)); - } else { - searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); - } - return lookupListener; + return SearchUtils.getConnectionLookupListener(searchTransportService, state, clusters); } private ActionListener getGroupedListener( diff --git a/server/src/main/java/org/opensearch/action/search/SearchUtils.java b/server/src/main/java/org/opensearch/action/search/SearchUtils.java new file mode 100644 index 0000000000000..0fdb1331d8ad6 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/SearchUtils.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.StepListener; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.node.DiscoveryNode; + +import java.util.Set; +import java.util.function.BiFunction; + +/** + * Helper class for common search functions + */ +public class SearchUtils { + + public SearchUtils() {} + + /** + * Get connection lookup listener for list of clusters passed + */ + public static StepListener> getConnectionLookupListener( + SearchTransportService searchTransportService, + ClusterState state, + Set clusters + ) { + final StepListener> lookupListener = new StepListener<>(); + + if (clusters.isEmpty()) { + lookupListener.onResponse((cluster, nodeId) -> state.getNodes().get(nodeId)); + } else { + searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); + } + return lookupListener; + } +} diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 4af98aad74541..a5b2e998d4361 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -872,10 +872,7 @@ public void createPitReaderContext(ShardId shardId, TimeValue keepAlive, ActionL Releasable decreasePitContexts = null; Engine.SearcherSupplier searcherSupplier = null; ReaderContext readerContext = null; - boolean success = false; try { - // use this when reader context is freed - decreasePitContexts = openPitContexts::decrementAndGet; if (openPitContexts.incrementAndGet() > maxOpenPitContext) { throw new OpenSearchRejectedExecutionException( "Trying to create too many Point In Time contexts. Must be less than or equal to: [" @@ -894,6 +891,9 @@ public void createPitReaderContext(ShardId shardId, TimeValue keepAlive, ActionL searchOperationListener.onNewReaderContext(readerContext); searchOperationListener.onNewPitContext(finalReaderContext); + + // use this when reader context is freed + decreasePitContexts = openPitContexts::decrementAndGet; readerContext.addOnClose(decreasePitContexts); decreasePitContexts = null; @@ -905,15 +905,9 @@ public void createPitReaderContext(ShardId shardId, TimeValue keepAlive, ActionL putReaderContext(readerContext); readerContext = null; listener.onResponse(finalReaderContext.id()); - success = true; } catch (Exception exc) { + Releasables.closeWhileHandlingException(searcherSupplier, readerContext, decreasePitContexts); listener.onFailure(exc); - } finally { - if (success) { - Releasables.close(readerContext, searcherSupplier, decreasePitContexts); - } else { - Releasables.closeWhileHandlingException(searcherSupplier, readerContext, decreasePitContexts); - } } }); } @@ -1043,14 +1037,14 @@ public void updatePitIdAndKeepAlive(UpdatePitContextRequest request, ActionListe Releasable updatePit = null; try { updatePit = readerContext.updatePitIdAndKeepAlive(request.getKeepAlive(), request.getPitId(), request.getCreationTime()); - updatePit.close(); listener.onResponse(new UpdatePitContextResponse(request.getPitId(), request.getCreationTime(), request.getKeepAlive())); } catch (Exception e) { freeReaderContext(readerContext.id()); + listener.onFailure(e); + } finally { if (updatePit != null) { updatePit.close(); } - listener.onFailure(e); } } diff --git a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java index 
5411d6c72686d..a1521cca03640 100644 --- a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java +++ b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java @@ -126,7 +126,7 @@ public void onFailure(Exception e) { clusterServiceMock = mock(ClusterService.class); ClusterState state = mock(ClusterState.class); - final Settings keepAliveSettings = Settings.builder().put(CreatePitController.PIT_CREATE_PHASE_KEEP_ALIVE.getKey(), 30000).build(); + final Settings keepAliveSettings = Settings.builder().put(CreatePitController.PIT_INIT_KEEP_ALIVE.getKey(), 30000).build(); when(clusterServiceMock.getSettings()).thenReturn(keepAliveSettings); when(state.getMetadata()).thenReturn(Metadata.EMPTY_METADATA); @@ -177,7 +177,6 @@ public Transport.Connection getConnection(String clusterAlias, DiscoveryNode nod CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); request.setIndices(new String[] { "index" }); - CreatePitController controller = new CreatePitController( request, searchTransportService, From af9b1436959b4792c6de829c3da7778f28a5c6e4 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Tue, 17 May 2022 13:06:22 +0530 Subject: [PATCH 13/19] Rest high level clients Signed-off-by: Bharathwaj G --- .../opensearch/client/RequestConverters.java | 17 ++-- .../client/RestHighLevelClient.java | 90 +++++++++++++++++-- .../java/org/opensearch/client/PitIT.java | 30 ++++++- .../client/RequestConvertersTests.java | 29 ++++-- .../java/org/opensearch/client/SearchIT.java | 29 +++--- .../rest-api-spec/api/delete_pit.json | 28 ++++++ .../search/TransportDeletePitAction.java | 21 +++-- 7 files changed, 192 insertions(+), 52 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/delete_pit.json diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index 277759c921fbf..e8309f40b0ec7 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -53,11 +53,7 @@ import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.search.ClearScrollRequest; -import org.opensearch.action.search.CreatePitRequest; -import org.opensearch.action.search.MultiSearchRequest; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.action.search.*; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.WriteRequest; @@ -477,6 +473,17 @@ static Request createPit(CreatePitRequest createPitRequest) throws IOException { return request; } + static Request deletePit(DeletePitRequest deletePitRequest) throws IOException { + Request request = new Request(HttpDelete.METHOD_NAME, "/_search/point_in_time"); + request.setEntity(createEntity(deletePitRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request deleteAllPits(DeletePitRequest deletePitRequest) { + Request request = new Request(HttpDelete.METHOD_NAME, "/_search/point_in_time/_all"); + return request; + } + static Request clearScroll(ClearScrollRequest clearScrollRequest) throws IOException { Request request = new Request(HttpDelete.METHOD_NAME, 
"/_search/scroll"); request.setEntity(createEntity(clearScrollRequest, REQUEST_BODY_CONTENT_TYPE)); diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index f3360630a26b7..5d2b1e95e2ee0 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -57,15 +57,7 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.search.ClearScrollRequest; -import org.opensearch.action.search.ClearScrollResponse; -import org.opensearch.action.search.CreatePitRequest; -import org.opensearch.action.search.CreatePitResponse; -import org.opensearch.action.search.MultiSearchRequest; -import org.opensearch.action.search.MultiSearchResponse; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.action.search.*; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.action.update.UpdateRequest; import org.opensearch.action.update.UpdateResponse; @@ -1298,6 +1290,86 @@ public final Cancellable createPitAsync( ); } + /** + * Delete PIT context using delete PIT API + * + * @param deletePitRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + */ + public final DeletePitResponse deletePit(DeletePitRequest deletePitRequest, RequestOptions options) throws IOException { + return performRequestAndParseEntity( + deletePitRequest, + RequestConverters::deletePit, + options, + DeletePitResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously Delete PIT context using delete PIT API + * + * @param deletePitRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return the response + */ + public final Cancellable deletePitAsync( + DeletePitRequest deletePitRequest, + RequestOptions options, + ActionListener listener + ) { + return performRequestAsyncAndParseEntity( + deletePitRequest, + RequestConverters::deletePit, + options, + DeletePitResponse::fromXContent, + listener, + emptySet() + ); + } + + /** + * Delete all PIT contexts using delete all PITs API + * + * @param deletePitRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + */ + public final DeletePitResponse deleteAllPits(DeletePitRequest deletePitRequest, RequestOptions options) throws IOException { + return performRequestAndParseEntity( + deletePitRequest, + RequestConverters::deleteAllPits, + options, + DeletePitResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously Delete all PIT contexts using delete all PITs API + * + * @param deletePitRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return the response + */ + public final Cancellable deleteAllPitsAsync( + DeletePitRequest deletePitRequest, + RequestOptions options, + ActionListener listener + ) { + return performRequestAsyncAndParseEntity( + deletePitRequest, + RequestConverters::deleteAllPits, + options, + DeletePitResponse::fromXContent, + listener, + emptySet() + ); + } + /** * Clears one or more scroll ids using the Clear Scroll API. * diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java index 99901eabc91aa..a962655ac521f 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java @@ -13,9 +13,13 @@ import org.junit.Before; import org.opensearch.action.search.CreatePitRequest; import org.opensearch.action.search.CreatePitResponse; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; import org.opensearch.common.unit.TimeValue; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.TimeUnit; /** @@ -51,8 +55,28 @@ public void testCreatePit() throws IOException { assertEquals(1, pitResponse.getSuccessfulShards()); assertEquals(0, pitResponse.getFailedShards()); assertEquals(0, pitResponse.getSkippedShards()); + List pitIds = new ArrayList<>(); + pitIds.add(pitResponse.getId()); + DeletePitRequest deletePitRequest = new DeletePitRequest(pitIds); + DeletePitResponse deletePitResponse = execute(deletePitRequest, highLevelClient()::deletePit, highLevelClient()::deletePitAsync); + assertTrue(deletePitResponse.isSucceeded()); } - /** - * Todo: add deletion logic and test cluster settings - */ + + public void testDeleteAllPits() throws IOException { + CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index"); + CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); + CreatePitResponse pitResponse1 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); + assertTrue(pitResponse.getId() != null); + assertTrue(pitResponse1.getId() != null); + List pitIds = new ArrayList<>(); + pitIds.add("_all"); + DeletePitRequest deletePitRequest = new DeletePitRequest(pitIds); + DeletePitResponse deletePitResponse = execute( + deletePitRequest, + highLevelClient()::deleteAllPits, + highLevelClient()::deleteAllPitsAsync + ); + assertTrue(deletePitResponse.isSucceeded()); + } + } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java index 4f0b2ac0d88a1..e8dbf9b5ea403 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java @@ -52,12 +52,7 @@ import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.search.ClearScrollRequest; -import org.opensearch.action.search.CreatePitRequest; -import org.opensearch.action.search.MultiSearchRequest; -import 
org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchScrollRequest; -import org.opensearch.action.search.SearchType; +import org.opensearch.action.search.*; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.WriteRequest; @@ -1326,6 +1321,28 @@ public void testCreatePit() throws IOException { assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } + public void testDeletePit() throws IOException { + List pitIds = new ArrayList<>(); + pitIds.add("pitid1"); + pitIds.add("pitid2"); + DeletePitRequest deletePitRequest = new DeletePitRequest(pitIds); + Request request = RequestConverters.deletePit(deletePitRequest); + String endpoint = "/_search/point_in_time"; + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals(endpoint, request.getEndpoint()); + assertToXContentBody(deletePitRequest, request.getEntity()); + assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + } + + public void testDeleteAllPits() throws IOException { + DeletePitRequest deletePitRequest = new DeletePitRequest(); + Request request = RequestConverters.deletePit(deletePitRequest); + String endpoint = "/_search/point_in_time/_all"; + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals(endpoint, request.getEndpoint()); + assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + } + public void testSearchTemplate() throws Exception { // Create a random request. String[] indices = randomIndicesNames(0, 5); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java index 01a7f892c80a1..fc7bfea247a3b 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java @@ -41,15 +41,7 @@ import org.opensearch.action.fieldcaps.FieldCapabilities; import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest; import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse; -import org.opensearch.action.search.ClearScrollRequest; -import org.opensearch.action.search.ClearScrollResponse; -import org.opensearch.action.search.CreatePitRequest; -import org.opensearch.action.search.CreatePitResponse; -import org.opensearch.action.search.MultiSearchRequest; -import org.opensearch.action.search.MultiSearchResponse; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.action.search.*; import org.opensearch.client.core.CountRequest; import org.opensearch.client.core.CountResponse; import org.opensearch.common.Strings; @@ -103,11 +95,7 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.TimeUnit; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -774,16 +762,13 @@ public void testSearchWithPit() throws Exception { client().performRequest(doc); } client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh")); - CreatePitRequest pitRequest = 
new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "test"); CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35) .sort("field", SortOrder.ASC) .pointInTimeBuilder(new PointInTimeBuilder(pitResponse.getId())); SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder); SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); - try { long counter = 0; assertSearchHeader(searchResponse); @@ -793,7 +778,15 @@ public void testSearchWithPit() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } } finally { - // TODO : Delete PIT + List pitIds = new ArrayList<>(); + pitIds.add(pitResponse.getId()); + DeletePitRequest deletePitRequest = new DeletePitRequest(pitIds); + DeletePitResponse deletePitResponse = execute( + deletePitRequest, + highLevelClient()::deletePit, + highLevelClient()::deletePitAsync + ); + assertTrue(deletePitResponse.isSucceeded()); } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_pit.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_pit.json new file mode 100644 index 0000000000000..34e831f78db4c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_pit.json @@ -0,0 +1,28 @@ +{ + "delete_pit":{ + "documentation":{ + "url":"https://opensearch.org/docs/latest/opensearch/rest-api/point_in_time/", + "description":"Deletes point in time context." + }, + "stability":"stable", + "url":{ + "paths":[ + { + "path":"/_search/point_in_time", + "methods":[ + "DELETE" + ] + }, + { + "path":"/_search/point_in_time/_all", + "methods":[ + "DELETE" + ] + } + ] + }, + "body":{ + "description":"A comma-separated list of pit IDs to clear" + } + } +} diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java index 731e776e52191..c62d26f09b26b 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.util.SetOnce; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; @@ -99,17 +98,17 @@ void deleteAllPits(ActionListener listener) { new ActionListener<>() { @Override public void onResponse(final Collection responses) { - //final SetOnce succeeded = new SetOnce<>(); - boolean hasFailures = responses.stream().anyMatch(r-> !r.isFreed()); + // final SetOnce succeeded = new SetOnce<>(); + boolean hasFailures = responses.stream().anyMatch(r -> !r.isFreed()); listener.onResponse(new DeletePitResponse(!hasFailures)); -// for (SearchTransportService.SearchFreeContextResponse response : responses) { -// if (!response.isFreed()) { -// succeeded.set(false); -// break; -// } -// } -// succeeded.trySet(true); -// listener.onResponse(new DeletePitResponse(succeeded.get())); + // for (SearchTransportService.SearchFreeContextResponse response : responses) { + // if (!response.isFreed()) { + // succeeded.set(false); + // break; + // } + // } + // succeeded.trySet(true); + // 
listener.onResponse(new DeletePitResponse(succeeded.get())); } @Override From 87cb8a59602dfeff048c09a81f537fe157010d4f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 May 2022 10:31:26 -0700 Subject: [PATCH 14/19] Bump com.diffplug.spotless from 6.5.2 to 6.6.1 (#3356) Bumps com.diffplug.spotless from 6.5.2 to 6.6.1. --- updated-dependencies: - dependency-name: com.diffplug.spotless dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index fc8dc2be6aa47..9f2c483fa8de0 100644 --- a/build.gradle +++ b/build.gradle @@ -48,7 +48,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.5.2" apply false + id "com.diffplug.spotless" version "6.6.1" apply false id "org.gradle.test-retry" version "1.4.0" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' From 76b5ea630976602e465e67d605a2d9dcf5eabc74 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 May 2022 10:32:47 -0700 Subject: [PATCH 15/19] Bump grpc-context from 1.45.1 to 1.46.0 in /plugins/repository-gcs (#3361) * Bump grpc-context from 1.45.1 to 1.46.0 in /plugins/repository-gcs Bumps [grpc-context](https://github.com/grpc/grpc-java) from 1.45.1 to 1.46.0. - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.45.1...v1.46.0) --- updated-dependencies: - dependency-name: io.grpc:grpc-context dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-gcs/build.gradle | 2 +- plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 | 1 - plugins/repository-gcs/licenses/grpc-context-1.46.0.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/grpc-context-1.46.0.jar.sha1 diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index c7b6dd5ae0468..67468639dc354 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -75,7 +75,7 @@ dependencies { api 'com.google.http-client:google-http-client-jackson2:1.35.0' api 'com.google.http-client:google-http-client-gson:1.41.4' api 'com.google.api:gax-httpjson:0.101.0' - api 'io.grpc:grpc-context:1.45.1' + api 'io.grpc:grpc-context:1.46.0' api 'io.opencensus:opencensus-api:0.18.0' api 'io.opencensus:opencensus-contrib-http-util:0.18.0' api 'com.google.apis:google-api-services-storage:v1-rev20200814-1.30.10' diff --git a/plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 deleted file mode 100644 index eb7e4fcd78e97..0000000000000 --- a/plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -485a08c019cc78914a477b1dfc7052820b8d822c \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-context-1.46.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.46.0.jar.sha1 new file mode 100644 index 0000000000000..e4d4c881492a2 --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-context-1.46.0.jar.sha1 @@ -0,0 +1 @@ +621d765a459758cdcd9aa4b4795392a4353e3576 \ No newline at end of file From 892e9846b599fe7717bc1cb1748c7c1a75a47ae4 Mon Sep 17 00:00:00 2001 From: Suraj Singh Date: Wed, 18 May 2022 10:43:32 -0700 Subject: [PATCH 16/19] [Type removal] Remove redundant _type in pipeline simulate action (#3371) Signed-off-by: Suraj Singh --- .../java/org/opensearch/client/IngestRequestConvertersTests.java | 1 - .../resources/rest-api-spec/test/ingest/120_grok.yml | 1 - .../java/org/opensearch/ingest/IngestClientIT.java | 1 - 3 files changed, 3 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java index e0c7f69325f87..200069ade1ea2 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java @@ -121,7 +121,6 @@ public void testSimulatePipeline() throws IOException { + " \"docs\": [" + " {" + " \"_index\": \"index\"," - + " \"_type\": \"_doc\"," + " \"_id\": \"id\"," + " \"_source\": {" + " \"foo\": \"rab\"" diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml index 14c70c17265af..c0aec0e3d7392 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml @@ -134,7 +134,6 @@ teardown: "docs": [ 
{ "_index": "index", - "_type": "type", "_id": "id", "_source": { "field": "abc2xyz" diff --git a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java index d577019590019..404b13aae5b9c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/ingest/IngestClientIT.java @@ -111,7 +111,6 @@ public void testSimulate() throws Exception { .startArray("docs") .startObject() .field("_index", "index") - .field("_type", "type") .field("_id", "id") .startObject("_source") .field("foo", "bar") From f8b102c565362def6908eed7dc82ae2b3cec8cb1 Mon Sep 17 00:00:00 2001 From: Ankit Jain Date: Wed, 18 May 2022 10:51:21 -0700 Subject: [PATCH 17/19] Removing hard coded value of max concurrent shard requests (#3364) Signed-off-by: Ankit Jain --- .../org/opensearch/action/search/TransportSearchAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java index 5dd576fa6ce53..1ca477942cdf6 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java @@ -340,7 +340,7 @@ public AbstractSearchAsyncAction asyncSearchAction( clusterState, task, new ArraySearchPhaseResults<>(shardsIts.size()), - 1, + searchRequest.getMaxConcurrentShardRequests(), clusters ) { @Override From 248f1884513c10ecc5108fe012d433f65fe98946 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Wed, 18 May 2022 12:30:06 +0530 Subject: [PATCH 18/19] fixing tests and adding util Signed-off-by: Bharathwaj G --- .../opensearch/client/RequestConverters.java | 7 +- .../client/RestHighLevelClient.java | 12 +- .../client/RequestConvertersTests.java | 8 +- .../java/org/opensearch/client/SearchIT.java | 19 +- .../action/search/CreatePitController.java | 2 +- .../opensearch/action/search/SearchUtils.java | 58 ++ .../search/TransportDeletePitAction.java | 59 +- .../search/CreatePitControllerTests.java | 88 +- .../action/search/PitTestsUtil.java | 84 ++ .../search/TransportDeletePitActionTests.java | 850 +++++++++--------- 10 files changed, 654 insertions(+), 533 deletions(-) create mode 100644 server/src/test/java/org/opensearch/action/search/PitTestsUtil.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index e8309f40b0ec7..b872ee21f9927 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -53,7 +53,12 @@ import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.search.*; +import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchScrollRequest; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.action.support.IndicesOptions; import 
org.opensearch.action.support.WriteRequest; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index 5d2b1e95e2ee0..4cb3ff999f793 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -57,7 +57,17 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.search.*; +import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchScrollRequest; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.action.update.UpdateRequest; import org.opensearch.action.update.UpdateResponse; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java index e8dbf9b5ea403..087d25fbd757d 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java @@ -52,7 +52,13 @@ import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.search.*; +import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.action.search.SearchType; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.WriteRequest; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java index fc7bfea247a3b..c0fd95e4c4e52 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java @@ -41,7 +41,17 @@ import org.opensearch.action.fieldcaps.FieldCapabilities; import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest; import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse; -import org.opensearch.action.search.*; +import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; +import org.opensearch.action.search.DeletePitRequest; +import org.opensearch.action.search.DeletePitResponse; 
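The explicit DeletePitRequest/DeletePitResponse imports added above back the delete-PIT tests in this series. For reference, the delete-all variant added to the high-level client can be invoked as in the short sketch below; it assumes an already initialized RestHighLevelClient named client, mirrors PitIT.testDeleteAllPits shown earlier, and is an illustrative aside rather than part of the patch.

// Delete every open PIT via the "_all" sentinel (sketch; exception handling omitted)
List<String> pitIds = new ArrayList<>();
pitIds.add("_all");
DeletePitRequest deleteAllRequest = new DeletePitRequest(pitIds);
DeletePitResponse deleteAllResponse = client.deleteAllPits(deleteAllRequest, RequestOptions.DEFAULT);
assert deleteAllResponse.isSucceeded();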
+import org.opensearch.action.search.MultiSearchRequest; +import org.opensearch.action.search.MultiSearchResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchScrollRequest; import org.opensearch.client.core.CountRequest; import org.opensearch.client.core.CountResponse; import org.opensearch.common.Strings; @@ -95,7 +105,12 @@ import org.junit.Before; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitController.java b/server/src/main/java/org/opensearch/action/search/CreatePitController.java index 3d2ecc8b695c6..3aa7b60f18fc3 100644 --- a/server/src/main/java/org/opensearch/action/search/CreatePitController.java +++ b/server/src/main/java/org/opensearch/action/search/CreatePitController.java @@ -268,6 +268,6 @@ public void onFailure(Exception e) { logger.error("Cleaning up PIT contexts failed ", e); } }; - ClearScrollController.closeContexts(clusterService.state().getNodes(), searchTransportService, contexts, deleteListener); + SearchUtils.deletePits(contexts, deleteListener, clusterService.state(), searchTransportService); } } diff --git a/server/src/main/java/org/opensearch/action/search/SearchUtils.java b/server/src/main/java/org/opensearch/action/search/SearchUtils.java index 148d1645568b1..b9eea817c75b9 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchUtils.java +++ b/server/src/main/java/org/opensearch/action/search/SearchUtils.java @@ -8,18 +8,28 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.OpenSearchException; +import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; +import org.opensearch.action.support.GroupedActionListener; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.Strings; import org.opensearch.transport.RemoteClusterService; +import org.opensearch.transport.Transport; +import java.util.Collection; import java.util.Set; import java.util.function.BiFunction; +import java.util.stream.Collectors; /** * Helper class for common search functions */ public class SearchUtils { + private static final Logger logger = LogManager.getLogger(CreatePitController.class); public SearchUtils() {} @@ -40,4 +50,52 @@ public static StepListener> getConnect } return lookupListener; } + + /** + * Delete list of pits, return success if all reader contexts are deleted ( or not found ). 
+ */ + public static void deletePits( + Collection contexts, + ActionListener listener, + ClusterState state, + SearchTransportService searchTransportService + ) { + final Set clusters = contexts.stream() + .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) + .map(SearchContextIdForNode::getClusterAlias) + .collect(Collectors.toSet()); + StepListener> lookupListener = getConnectionLookupListener( + searchTransportService.getRemoteClusterService(), + state, + clusters + ); + lookupListener.whenComplete(nodeLookup -> { + final GroupedActionListener groupedListener = new GroupedActionListener<>( + ActionListener.delegateFailure( + listener, + (l, result) -> l.onResponse(Math.toIntExact(result.stream().filter(r -> r).count())) + ), + contexts.size() + ); + + for (SearchContextIdForNode contextId : contexts) { + final DiscoveryNode node = nodeLookup.apply(contextId.getClusterAlias(), contextId.getNode()); + if (node == null) { + groupedListener.onFailure(new OpenSearchException("node not found")); + } else { + try { + final Transport.Connection connection = searchTransportService.getConnection(contextId.getClusterAlias(), node); + searchTransportService.sendFreePITContext( + connection, + contextId.getSearchContextId(), + ActionListener.wrap(r -> groupedListener.onResponse(r.isFreed()), e -> groupedListener.onResponse(false)) + ); + } catch (Exception e) { + logger.debug("Delete PIT failed ", e); + groupedListener.onResponse(false); + } + } + } + }, listener::onFailure); + } } diff --git a/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java index c62d26f09b26b..234fceaa92bb4 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportDeletePitAction.java @@ -11,15 +11,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; -import org.opensearch.action.StepListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.GroupedActionListener; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.tasks.Task; @@ -29,9 +26,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.Set; -import java.util.function.BiFunction; -import java.util.stream.Collectors; /** * Transport action for deleting pit reader context - supports deleting list and all pit contexts @@ -98,17 +92,8 @@ void deleteAllPits(ActionListener listener) { new ActionListener<>() { @Override public void onResponse(final Collection responses) { - // final SetOnce succeeded = new SetOnce<>(); boolean hasFailures = responses.stream().anyMatch(r -> !r.isFreed()); listener.onResponse(new DeletePitResponse(!hasFailures)); - // for (SearchTransportService.SearchFreeContextResponse response : responses) { - // if (!response.isFreed()) { - // succeeded.set(false); - // break; - // } - // } - // succeeded.trySet(true); - // listener.onResponse(new DeletePitResponse(succeeded.get())); } 
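With the shared helper above, the cleanup fan-out works the same way in both paths: each node's free-context response becomes a boolean, and a GroupedActionListener collects them. deleteAllPits then reports success only when no node returned an unfreed context, while SearchUtils.deletePits resolves its listener to the number of contexts actually freed. A small self-contained sketch of those two aggregation rules, written against plain Java types rather than the OpenSearch listeners, purely to make the semantics concrete:

import java.util.List;

public class PitDeleteAggregationSketch {
    // Mirrors responses.stream().anyMatch(r -> !r.isFreed()) in deleteAllPits:
    // the DeletePitResponse is successful only when every context was freed.
    static boolean succeeded(List<Boolean> perNodeFreed) {
        return perNodeFreed.stream().noneMatch(freed -> !freed);
    }

    // Mirrors the grouped listener in SearchUtils.deletePits, which resolves
    // to the count of contexts that were actually freed.
    static long freedCount(List<Boolean> perNodeFreed) {
        return perNodeFreed.stream().filter(freed -> freed).count();
    }

    public static void main(String[] args) {
        List<Boolean> responses = List.of(true, true, false);
        System.out.println(succeeded(responses));  // false
        System.out.println(freedCount(responses)); // 2
    }
}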
@Override @@ -133,48 +118,6 @@ public void onFailure(final Exception e) { * Delete list of pits, return success if all reader contexts are deleted ( or not found ). */ void deletePits(List contexts, ActionListener listener) { - final StepListener> lookupListener = getLookupListener(contexts); - lookupListener.whenComplete(nodeLookup -> { - final GroupedActionListener groupedListener = new GroupedActionListener<>( - ActionListener.delegateFailure( - listener, - (l, result) -> l.onResponse(Math.toIntExact(result.stream().filter(r -> r).count())) - ), - contexts.size() - ); - - for (SearchContextIdForNode contextId : contexts) { - final DiscoveryNode node = nodeLookup.apply(contextId.getClusterAlias(), contextId.getNode()); - if (node == null) { - groupedListener.onFailure(new OpenSearchException("node not found")); - } else { - try { - final Transport.Connection connection = searchTransportService.getConnection(contextId.getClusterAlias(), node); - searchTransportService.sendFreePITContext( - connection, - contextId.getSearchContextId(), - ActionListener.wrap(r -> groupedListener.onResponse(r.isFreed()), e -> groupedListener.onResponse(false)) - ); - } catch (Exception e) { - logger.debug("Delete PIT failed ", e); - groupedListener.onResponse(false); - } - } - } - }, listener::onFailure); - } - - private StepListener> getLookupListener(List contexts) { - final StepListener> lookupListener = new StepListener<>(); - final Set clusters = contexts.stream() - .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) - .map(SearchContextIdForNode::getClusterAlias) - .collect(Collectors.toSet()); - if (clusters.isEmpty() == false) { - searchTransportService.getRemoteClusterService().collectNodes(clusters, lookupListener); - } else { - lookupListener.onResponse((cluster, nodeId) -> clusterService.state().getNodes().get(nodeId)); - } - return lookupListener; + SearchUtils.deletePits(contexts, listener, clusterService.state(), searchTransportService); } } diff --git a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java index f7078762dee5b..b608c92c95f40 100644 --- a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java +++ b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java @@ -22,18 +22,13 @@ import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.concurrent.AtomicArray; import org.opensearch.index.query.IdsQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.index.shard.ShardId; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; -import org.opensearch.search.SearchPhaseResult; -import org.opensearch.search.SearchShardTarget; import org.opensearch.search.aggregations.InternalAggregations; -import org.opensearch.search.internal.AliasFilter; import org.opensearch.search.internal.InternalSearchResponse; import org.opensearch.search.internal.ShardSearchContextId; import org.opensearch.tasks.Task; @@ -44,17 +39,17 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.RemoteClusterConnectionTests; import org.opensearch.transport.Transport; + import java.util.Arrays; import java.util.Collections; -import 
java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.opensearch.action.search.PitTestsUtil.getPitId; /** * Functional tests for various methods in create pit controller. Covers update pit phase specifically since @@ -213,6 +208,20 @@ public void sendFreeContext( t.start(); } + /** + * Test if cleanup request is called + */ + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + @Override public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { return new SearchAsyncActionTests.MockConnection(node); @@ -297,8 +306,11 @@ public Transport.Connection getConnection(String clusterAlias, DiscoveryNode nod return new SearchAsyncActionTests.MockConnection(node); } + /** + * Test if cleanup request is called + */ @Override - public void sendFreeContext( + public void sendFreePITContext( Transport.Connection connection, ShardSearchContextId contextId, ActionListener listener @@ -394,8 +406,11 @@ public void updatePitContext( } } + /** + * Test if cleanup request is called + */ @Override - public void sendFreeContext( + public void sendFreePITContext( Transport.Connection connection, ShardSearchContextId contextId, ActionListener listener @@ -484,8 +499,11 @@ public void updatePitContext( t.start(); } + /** + * Test if cleanup request is called + */ @Override - public void sendFreeContext( + public void sendFreePITContext( Transport.Connection connection, ShardSearchContextId contextId, ActionListener listener @@ -539,54 +557,4 @@ public void onFailure(Exception e) { } } - - public static QueryBuilder randomQueryBuilder() { - if (randomBoolean()) { - return new TermQueryBuilder(randomAlphaOfLength(10), randomAlphaOfLength(10)); - } else if (randomBoolean()) { - return new MatchAllQueryBuilder(); - } else { - return new IdsQueryBuilder().addIds(randomAlphaOfLength(10)); - } - } - - public static String getPitId() { - AtomicArray array = new AtomicArray<>(3); - SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult( - new ShardSearchContextId("a", 1), - null - ); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); - SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult( - new ShardSearchContextId("b", 12), - null - ); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); - SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult( - new ShardSearchContextId("c", 42), - null - ); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); - array.setOnce(0, testSearchPhaseResult1); - array.setOnce(1, testSearchPhaseResult2); - array.setOnce(2, testSearchPhaseResult3); - - final Version version = Version.CURRENT; - final Map aliasFilters = new HashMap<>(); - for (SearchPhaseResult result : array.asList()) { 
- final AliasFilter aliasFilter; - if (randomBoolean()) { - aliasFilter = new AliasFilter(randomQueryBuilder()); - } else if (randomBoolean()) { - aliasFilter = new AliasFilter(randomQueryBuilder(), "alias-" + between(1, 10)); - } else { - aliasFilter = AliasFilter.EMPTY; - } - if (randomBoolean()) { - aliasFilters.put(result.getSearchShardTarget().getShardId().getIndex().getUUID(), aliasFilter); - } - } - return SearchContextId.encode(array.asList(), aliasFilters, version); - } - } diff --git a/server/src/test/java/org/opensearch/action/search/PitTestsUtil.java b/server/src/test/java/org/opensearch/action/search/PitTestsUtil.java new file mode 100644 index 0000000000000..ec83cb45697d9 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/search/PitTestsUtil.java @@ -0,0 +1,84 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.Version; +import org.opensearch.common.util.concurrent.AtomicArray; +import org.opensearch.index.query.IdsQueryBuilder; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.shard.ShardId; +import org.opensearch.search.SearchPhaseResult; +import org.opensearch.search.SearchShardTarget; +import org.opensearch.search.internal.AliasFilter; +import org.opensearch.search.internal.ShardSearchContextId; + +import java.util.HashMap; +import java.util.Map; + +import static org.opensearch.test.OpenSearchTestCase.between; +import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLength; +import static org.opensearch.test.OpenSearchTestCase.randomBoolean; + +/** + * Helper class for common pit tests functions + */ +public class PitTestsUtil { + private PitTestsUtil() {} + + public static QueryBuilder randomQueryBuilder() { + if (randomBoolean()) { + return new TermQueryBuilder(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } else if (randomBoolean()) { + return new MatchAllQueryBuilder(); + } else { + return new IdsQueryBuilder().addIds(randomAlphaOfLength(10)); + } + } + + public static String getPitId() { + AtomicArray array = new AtomicArray<>(3); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("a", 1), + null + ); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("b", 12), + null + ); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("c", 42), + null + ); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + array.setOnce(0, testSearchPhaseResult1); + array.setOnce(1, testSearchPhaseResult2); + array.setOnce(2, testSearchPhaseResult3); + + final Version version = Version.CURRENT; + final Map aliasFilters = new HashMap<>(); + for (SearchPhaseResult result : array.asList()) { + final 
AliasFilter aliasFilter; + if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder()); + } else if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder(), "alias-" + between(1, 10)); + } else { + aliasFilter = AliasFilter.EMPTY; + } + if (randomBoolean()) { + aliasFilters.put(result.getSearchShardTarget().getShardId().getIndex().getUUID(), aliasFilter); + } + } + return SearchContextId.encode(array.asList(), aliasFilters, version); + } +} diff --git a/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java index 66a0b771bbe44..86c5b06e1b0fd 100644 --- a/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java @@ -20,7 +20,6 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.UUIDs; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; import org.opensearch.index.query.IdsQueryBuilder; @@ -34,28 +33,30 @@ import org.opensearch.search.internal.ShardSearchContextId; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskId; -import org.opensearch.tasks.TaskManager; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.transport.MockTransportService; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.RemoteClusterConnectionTests; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportResponse; -import org.opensearch.transport.TransportService; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.opensearch.action.search.PitTestsUtil.getPitId; import static org.opensearch.action.support.PlainActionFuture.newFuture; +import static org.opensearch.transport.RemoteClusterConnectionTests.startTransport; /** * Functional tests for transport delete pit action */ public class TransportDeletePitActionTests extends OpenSearchTestCase { - DiscoveryNode node1 = null; DiscoveryNode node2 = null; DiscoveryNode node3 = null; @@ -65,13 +66,34 @@ public class TransportDeletePitActionTests extends OpenSearchTestCase { DiscoveryNodes nodes = null; NamedWriteableRegistry namedWriteableRegistry = null; ClusterService clusterServiceMock = null; + Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); + private ThreadPool threadPool = new ThreadPool(settings); + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } + + private MockTransportService startTransport(String id, List knownNodes, Version version) { + return startTransport(id, knownNodes, version, Settings.EMPTY); + } + + private MockTransportService startTransport( + final String id, + final List knownNodes, + final Version version, + final Settings settings + ) { + return RemoteClusterConnectionTests.startTransport(id, knownNodes, version, threadPool, settings); + } @Before public void setupData() { node1 = new 
DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); - pitId = CreatePitControllerTests.getPitId(); + pitId = getPitId(); namedWriteableRegistry = new NamedWriteableRegistry( Arrays.asList( new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new), @@ -116,469 +138,479 @@ public void setupData() { */ public void testDeletePitSuccess() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - - @Override - public void sendFreePITContext( - Transport.Connection connection, - ShardSearchContextId contextId, - ActionListener listener - ) { - deleteNodesInvoked.add(connection.getNode()); - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); - t.start(); - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(true, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + 
t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(true, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + + } } } public void testDeleteAllPITSuccess() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { - deleteNodesInvoked.add(connection.getNode()); - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); - t.start(); - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(true, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> 
listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(true, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + + } } } public void testDeletePitWhenNodeIsDown() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - - @Override - public void sendFreePITContext( - Transport.Connection connection, - ShardSearchContextId contextId, - ActionListener listener - ) { - deleteNodesInvoked.add(connection.getNode()); - - if (connection.getNode().getId() == "node_3") { - Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); - t.start(); - } else { - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); - t.start(); - } - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(false, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + 
SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } } } public void testDeletePitWhenAllNodesAreDown() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - - @Override - public void sendFreePITContext( - Transport.Connection connection, - ShardSearchContextId contextId, - ActionListener listener - ) { - deleteNodesInvoked.add(connection.getNode()); - Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); - t.start(); - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); - PlainActionFuture future = newFuture(); - 
action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(false, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } } } public void testDeletePitFailure() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - - @Override - public void sendFreePITContext( - Transport.Connection connection, - ShardSearchContextId contextId, - ActionListener listener - ) { - deleteNodesInvoked.add(connection.getNode()); - - if (connection.getNode().getId() == "node_3") { - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(false))); - t.start(); - } else { - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); - t.start(); - } - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - 
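What the failure cases above pin down is that deletion is a best-effort fan-out: a node that is down, or a node that reports it could not free its contexts, flips isSucceeded() to false, yet the request is still sent to every node. A hedged sketch of how a caller might react to that, assuming the transport action is also exposed through an ActionType (named DeletePitAction.INSTANCE here purely for illustration; that registration is not shown in these hunks, and `client` and `logger` are assumed to be in scope):

```java
// Hypothetical caller-side handling of a partially failed delete. isSucceeded() is false
// if any node failed or declined to free its PIT contexts; those contexts are not lost,
// they simply expire once their keep alive elapses.
client.execute(DeletePitAction.INSTANCE, new DeletePitRequest("_all"), ActionListener.wrap(response -> {
    if (response.isSucceeded() == false) {
        logger.warn("some PIT contexts could not be freed; they will expire with their keep alive");
    }
}, e -> logger.error("delete pit request failed", e)));
```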
TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(false, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + + @Override + public void sendFreePITContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(false))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest(pitId); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } } } public void testDeleteAllPitWhenNodeIsDown() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - @Override - public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { - deleteNodesInvoked.add(connection.getNode()); - if (connection.getNode().getId() == "node_3") { - Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); - t.start(); - } else { - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); - t.start(); - } - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = 
startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(false, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + @Override + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down"))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } } } public void testDeleteAllPitWhenAllNodesAreDown() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - - @Override - public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { - deleteNodesInvoked.add(connection.getNode()); - Thread t = new Thread(() -> listener.onFailure(new Exception("node down"))); - t.start(); - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), 
- null, - Collections.emptySet() + + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(false, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + + @Override + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onFailure(new Exception("node down"))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } } } public void testDeleteAllPitFailure() throws InterruptedException, ExecutionException { List deleteNodesInvoked = new CopyOnWriteArrayList<>(); - Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); when(actionFilters.filters()).thenReturn(new ActionFilter[0]); - ThreadPool threadPool = new ThreadPool(settings); - try { - SearchTransportService searchTransportService = new SearchTransportService(null, null) { - - public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { - deleteNodesInvoked.add(connection.getNode()); - if (connection.getNode().getId() == "node_3") { - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(false))); - t.start(); - } else { - Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); - t.start(); - } - } - - @Override - public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { - return new SearchAsyncActionTests.MockConnection(node); - } - }; - TransportService transportService = new 
TransportService( - Settings.EMPTY, - mock(Transport.class), - threadPool, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> DiscoveryNode.createLocal(settings, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), - null, - Collections.emptySet() + + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) ) { - @Override - public TaskManager getTaskManager() { - return taskManager; - } - }; - TransportDeletePitAction action = new TransportDeletePitAction( - transportService, - actionFilters, - namedWriteableRegistry, - transportSearchAction, - clusterServiceMock, - searchTransportService - ); - DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); - PlainActionFuture future = newFuture(); - action.execute(task, deletePITRequest, future); - DeletePitResponse dr = future.get(); - assertEquals(false, dr.isSucceeded()); - assertEquals(3, deleteNodesInvoked.size()); - } finally { - assertTrue(OpenSearchTestCase.terminate(threadPool)); + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + + public void sendFreeAllPitContexts(Transport.Connection connection, final ActionListener listener) { + deleteNodesInvoked.add(connection.getNode()); + if (connection.getNode().getId() == "node_3") { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(false))); + t.start(); + } else { + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + transportService, + actionFilters, + namedWriteableRegistry, + transportSearchAction, + clusterServiceMock, + searchTransportService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + PlainActionFuture future = newFuture(); + action.execute(task, deletePITRequest, future); + DeletePitResponse dr = future.get(); + assertEquals(false, dr.isSucceeded()); + assertEquals(3, deleteNodesInvoked.size()); + } } } From 4a1cbd9e19521881b741a12addfbc8463965b6e0 Mon Sep 17 00:00:00 2001 From: Bharathwaj G <58062316+bharath-techie@users.noreply.github.com> Date: Thu, 19 May 2022 11:08:28 +0530 Subject: [PATCH 19/19] Create PIT API (#2745) * Create Point In Time API changes Signed-off-by: Bharathwaj G --- .../opensearch/client/RequestConverters.java | 23 +- .../client/RestHighLevelClient.java | 42 ++ .../java/org/opensearch/client/PitIT.java | 58 ++ .../client/RequestConvertersTests.java | 23 + .../java/org/opensearch/client/SearchIT.java | 35 ++ .../rest-api-spec/api/create_pit.json | 43 ++ .../search/searchafter/SearchAfterIT.java | 59 +- .../search/slice/SearchSliceIT.java | 86 ++- .../org/opensearch/action/ActionModule.java | 7 + .../action/search/CreatePitAction.java | 23 + 
.../action/search/CreatePitController.java | 273 ++++++++ .../action/search/CreatePitRequest.java | 195 ++++++ .../action/search/CreatePitResponse.java | 232 +++++++ .../action/search/SearchContextId.java | 2 +- .../action/search/SearchTransportService.java | 75 +++ .../opensearch/action/search/SearchUtils.java | 43 ++ .../search/TransportCreatePitAction.java | 139 ++++ .../search/UpdatePitContextRequest.java | 67 ++ .../search/UpdatePitContextResponse.java | 58 ++ .../java/org/opensearch/client/Client.java | 7 + .../client/support/AbstractClient.java | 8 + .../common/settings/ClusterSettings.java | 4 + .../common/settings/IndexScopedSettings.java | 1 + .../org/opensearch/index/IndexSettings.java | 28 + .../index/shard/SearchOperationListener.java | 44 ++ .../action/search/RestCreatePitAction.java | 57 ++ .../search/DefaultSearchContext.java | 20 +- .../org/opensearch/search/SearchService.java | 158 ++++- .../search/internal/PitReaderContext.java | 70 +++ .../search/internal/ReaderContext.java | 15 +- .../search/CreatePitControllerTests.java | 592 ++++++++++++++++++ .../search/DefaultSearchContextTests.java | 51 +- .../opensearch/search/PitMultiNodeTests.java | 211 +++++++ .../opensearch/search/PitSingleNodeTests.java | 575 +++++++++++++++++ .../opensearch/search/SearchServiceTests.java | 100 ++- .../search/pit/RestCreatePitActionTests.java | 78 +++ 36 files changed, 3481 insertions(+), 21 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/create_pit.json create mode 100644 server/src/main/java/org/opensearch/action/search/CreatePitAction.java create mode 100644 server/src/main/java/org/opensearch/action/search/CreatePitController.java create mode 100644 server/src/main/java/org/opensearch/action/search/CreatePitRequest.java create mode 100644 server/src/main/java/org/opensearch/action/search/CreatePitResponse.java create mode 100644 server/src/main/java/org/opensearch/action/search/SearchUtils.java create mode 100644 server/src/main/java/org/opensearch/action/search/TransportCreatePitAction.java create mode 100644 server/src/main/java/org/opensearch/action/search/UpdatePitContextRequest.java create mode 100644 server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java create mode 100644 server/src/main/java/org/opensearch/rest/action/search/RestCreatePitAction.java create mode 100644 server/src/main/java/org/opensearch/search/internal/PitReaderContext.java create mode 100644 server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java create mode 100644 server/src/test/java/org/opensearch/search/PitMultiNodeTests.java create mode 100644 server/src/test/java/org/opensearch/search/PitSingleNodeTests.java create mode 100644 server/src/test/java/org/opensearch/search/pit/RestCreatePitActionTests.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index afecdc3eea1a3..277759c921fbf 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -54,6 +54,7 @@ import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.CreatePitRequest; import 
org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchScrollRequest; @@ -92,6 +93,7 @@ import org.opensearch.index.reindex.ReindexRequest; import org.opensearch.index.reindex.UpdateByQueryRequest; import org.opensearch.index.seqno.SequenceNumbers; +import org.opensearch.rest.action.search.RestCreatePitAction; import org.opensearch.rest.action.search.RestSearchAction; import org.opensearch.script.mustache.MultiSearchTemplateRequest; import org.opensearch.script.mustache.SearchTemplateRequest; @@ -433,9 +435,15 @@ static void addSearchRequestParams(Params params, SearchRequest searchRequest) { params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); params.withRouting(searchRequest.routing()); params.withPreference(searchRequest.preference()); - params.withIndicesOptions(searchRequest.indicesOptions()); + if (searchRequest.pointInTimeBuilder() == null) { + params.withIndicesOptions(searchRequest.indicesOptions()); + } params.withSearchType(searchRequest.searchType().name().toLowerCase(Locale.ROOT)); - params.putParam("ccs_minimize_roundtrips", Boolean.toString(searchRequest.isCcsMinimizeRoundtrips())); + if (searchRequest.pointInTimeBuilder() != null) { + params.putParam("ccs_minimize_roundtrips", "false"); + } else { + params.putParam("ccs_minimize_roundtrips", Boolean.toString(searchRequest.isCcsMinimizeRoundtrips())); + } if (searchRequest.getPreFilterShardSize() != null) { params.putParam("pre_filter_shard_size", Integer.toString(searchRequest.getPreFilterShardSize())); } @@ -458,6 +466,17 @@ static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOEx return request; } + static Request createPit(CreatePitRequest createPitRequest) throws IOException { + Params params = new Params(); + params.putParam(RestCreatePitAction.ALLOW_PARTIAL_PIT_CREATION, Boolean.toString(createPitRequest.shouldAllowPartialPitCreation())); + params.putParam(RestCreatePitAction.KEEP_ALIVE, createPitRequest.getKeepAlive()); + params.withIndicesOptions(createPitRequest.indicesOptions()); + Request request = new Request(HttpPost.METHOD_NAME, endpoint(createPitRequest.indices(), "_search/point_in_time")); + request.addParameters(params.asMap()); + request.setEntity(createEntity(createPitRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request clearScroll(ClearScrollRequest clearScrollRequest) throws IOException { Request request = new Request(HttpDelete.METHOD_NAME, "/_search/scroll"); request.setEntity(createEntity(clearScrollRequest, REQUEST_BODY_CONTENT_TYPE)); diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index d293b979debb5..f3360630a26b7 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -59,6 +59,8 @@ import org.opensearch.action.index.IndexResponse; import org.opensearch.action.search.ClearScrollRequest; import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.action.search.SearchRequest; @@ -1256,6 +1258,46 @@ public final Cancellable scrollAsync( ); } + /** + * 
Create PIT context using create PIT API + * + * @param createPitRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + */ + public final CreatePitResponse createPit(CreatePitRequest createPitRequest, RequestOptions options) throws IOException { + return performRequestAndParseEntity( + createPitRequest, + RequestConverters::createPit, + options, + CreatePitResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously Create PIT context using create PIT API + * + * @param createPitRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return the response + */ + public final Cancellable createPitAsync( + CreatePitRequest createPitRequest, + RequestOptions options, + ActionListener listener + ) { + return performRequestAsyncAndParseEntity( + createPitRequest, + RequestConverters::createPit, + options, + CreatePitResponse::fromXContent, + listener, + emptySet() + ); + } + /** * Clears one or more scroll ids using the Clear Scroll API. * diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java new file mode 100644 index 0000000000000..99901eabc91aa --- /dev/null +++ b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java @@ -0,0 +1,58 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.client; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.junit.Before; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; +import org.opensearch.common.unit.TimeValue; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; + +/** + * Tests point in time API with rest high level client + */ +public class PitIT extends OpenSearchRestHighLevelClientTestCase { + + @Before + public void indexDocuments() throws IOException { + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/_doc/1"); + doc1.setJsonEntity("{\"type\":\"type1\", \"id\":1, \"num\":10, \"num2\":50}"); + client().performRequest(doc1); + Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/_doc/2"); + doc2.setJsonEntity("{\"type\":\"type1\", \"id\":2, \"num\":20, \"num2\":40}"); + client().performRequest(doc2); + Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/_doc/3"); + doc3.setJsonEntity("{\"type\":\"type1\", \"id\":3, \"num\":50, \"num2\":35}"); + client().performRequest(doc3); + Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/_doc/4"); + doc4.setJsonEntity("{\"type\":\"type2\", \"id\":4, \"num\":100, \"num2\":10}"); + client().performRequest(doc4); + Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/_doc/5"); + doc5.setJsonEntity("{\"type\":\"type2\", \"id\":5, \"num\":100, \"num2\":10}"); + client().performRequest(doc5); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); + } + + public void testCreatePit() throws IOException { + CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index"); + CreatePitResponse pitResponse = execute(pitRequest, 
highLevelClient()::createPit, highLevelClient()::createPitAsync); + assertTrue(pitResponse.getId() != null); + assertEquals(1, pitResponse.getTotalShards()); + assertEquals(1, pitResponse.getSuccessfulShards()); + assertEquals(0, pitResponse.getFailedShards()); + assertEquals(0, pitResponse.getSkippedShards()); + } + /** + * Todo: add deletion logic and test cluster settings + */ +} diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java index 0415b864ba35e..4f0b2ac0d88a1 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java @@ -53,6 +53,7 @@ import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.ClearScrollRequest; +import org.opensearch.action.search.CreatePitRequest; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchScrollRequest; @@ -131,6 +132,7 @@ import java.util.Locale; import java.util.Map; import java.util.StringJoiner; +import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Function; @@ -1303,6 +1305,27 @@ public void testClearScroll() throws IOException { assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } + public void testCreatePit() throws IOException { + String[] indices = randomIndicesNames(0, 5); + Map expectedParams = new HashMap<>(); + expectedParams.put("keep_alive", "1d"); + expectedParams.put("allow_partial_pit_creation", "true"); + CreatePitRequest createPitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, indices); + setRandomIndicesOptions(createPitRequest::indicesOptions, createPitRequest::indicesOptions, expectedParams); + Request request = RequestConverters.createPit(createPitRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_search/point_in_time"); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals(endpoint.toString(), request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertToXContentBody(createPitRequest, request.getEntity()); + assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + } + public void testSearchTemplate() throws Exception { // Create a random request. 
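Before the unrelated search-template test resumes, a short usage sketch of the two high-level-client entry points added in this commit, createPit and createPitAsync, assuming an already built RestHighLevelClient named `client` and an arbitrary index name (keep alive and index are placeholders, not values from the patch):

```java
// Usage sketch (not part of the committed diff) for the new client methods.
CreatePitRequest pitRequest = new CreatePitRequest(TimeValue.timeValueHours(1), true, "my-index");

// Blocking call: issues POST /my-index/_search/point_in_time?keep_alive=1h&allow_partial_pit_creation=true
CreatePitResponse pitResponse = client.createPit(pitRequest, RequestOptions.DEFAULT);
String pitId = pitResponse.getId();

// Non-blocking call with a listener.
client.createPitAsync(pitRequest, RequestOptions.DEFAULT, ActionListener.wrap(
    r -> { /* use r.getId() for subsequent searches */ },
    e -> { /* surface the failure */ }
));
```

Once a search request carries a PointInTimeBuilder, the converter changes earlier in this commit stop sending per-request indices options and force ccs_minimize_roundtrips to false, since the PIT id already identifies the target shards.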
String[] indices = randomIndicesNames(0, 5); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java index 19e287fb91be5..01a7f892c80a1 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java @@ -43,6 +43,8 @@ import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse; import org.opensearch.action.search.ClearScrollRequest; import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.action.search.SearchRequest; @@ -89,6 +91,7 @@ import org.opensearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import org.opensearch.search.aggregations.support.MultiValuesSourceFieldConfig; import org.opensearch.search.aggregations.support.ValueType; +import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.fetch.subphase.FetchSourceContext; import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; @@ -105,6 +108,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertToXContentEquivalent; @@ -762,6 +766,37 @@ public void testSearchScroll() throws Exception { } } + public void testSearchWithPit() throws Exception { + for (int i = 0; i < 100; i++) { + XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); + Request doc = new Request(HttpPut.METHOD_NAME, "/test/_doc/" + Integer.toString(i)); + doc.setJsonEntity(Strings.toString(builder)); + client().performRequest(doc); + } + client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh")); + + CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "test"); + CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35) + .sort("field", SortOrder.ASC) + .pointInTimeBuilder(new PointInTimeBuilder(pitResponse.getId())); + SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + + try { + long counter = 0; + assertSearchHeader(searchResponse); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); + } + } finally { + // TODO : Delete PIT + } + } + public void testMultiSearch() throws Exception { MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); SearchRequest searchRequest1 = new SearchRequest("index1"); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/create_pit.json b/rest-api-spec/src/main/resources/rest-api-spec/api/create_pit.json new file mode 100644 index 0000000000000..eb5f4977b99d9 --- 
/dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/create_pit.json @@ -0,0 +1,43 @@ +{ + "create_pit":{ + "documentation":{ + "url":"https://opensearch.org/docs/latest/opensearch/rest-api/point_in_time/", + "description":"Creates point in time context." + }, + "stability":"stable", + "url":{ + "paths":[ + { + "path":"/{index}/_search/point_in_time", + "methods":[ + "POST" + ], + "parts":{ + "index":{ + "type":"list", + "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" + } + } + } + ] + }, + "params":{ + "allow_partial_pit_creation":{ + "type":"boolean", + "description":"Allow if point in time can be created with partial failures" + }, + "keep_alive":{ + "type":"string", + "description":"Specify the keep alive for point in time" + }, + "preference":{ + "type":"string", + "description":"Specify the node or shard the operation should be performed on (default: random)" + }, + "routing":{ + "type":"list", + "description":"A comma-separated list of specific routing values" + } + } + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index 926e21294ffc8..c2591c28a95d4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -32,15 +32,21 @@ package org.opensearch.search.searchafter; +import org.opensearch.action.ActionFuture; import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.opensearch.action.index.IndexRequestBuilder; +import org.opensearch.action.search.CreatePitAction; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.common.UUIDs; +import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase; import org.hamcrest.Matchers; @@ -50,7 +56,6 @@ import java.util.Comparator; import java.util.Collections; import java.util.Arrays; - import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; @@ -155,6 +160,58 @@ public void testsShouldFail() throws Exception { } } + public void testPitWithSearchAfter() throws Exception { + assertAcked(client().admin().indices().prepareCreate("test").setMapping("field1", "type=long", "field2", "type=keyword").get()); + ensureGreen(); + indexRandom( + true, + client().prepareIndex("test").setId("0").setSource("field1", 0), + client().prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto"), + client().prepareIndex("test").setId("2").setSource("field1", 101), + client().prepareIndex("test").setId("3").setSource("field1", 99) + ); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new 
String[] { "test" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + SearchResponse sr = client().prepareSearch() + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 99 }) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .get(); + assertEquals(2, sr.getHits().getHits().length); + sr = client().prepareSearch() + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 100 }) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .get(); + assertEquals(1, sr.getHits().getHits().length); + sr = client().prepareSearch() + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 0 }) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .get(); + assertEquals(3, sr.getHits().getHits().length); + /** + * Add new data and assert PIT results remain the same and normal search results gets refreshed + */ + indexRandom(true, client().prepareIndex("test").setId("4").setSource("field1", 102)); + sr = client().prepareSearch() + .addSort("field1", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 0 }) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .get(); + assertEquals(3, sr.getHits().getHits().length); + sr = client().prepareSearch().addSort("field1", SortOrder.ASC).setQuery(matchAllQuery()).searchAfter(new Object[] { 0 }).get(); + assertEquals(4, sr.getHits().getHits().length); + client().admin().indices().prepareDelete("test").get(); + } + public void testWithNullStrings() throws InterruptedException { assertAcked(client().admin().indices().prepareCreate("test").setMapping("field2", "type=keyword").get()); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index 9c735c42052e3..eacbcc42a8157 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -32,9 +32,13 @@ package org.opensearch.search.slice; +import org.opensearch.action.ActionFuture; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.action.index.IndexRequestBuilder; +import org.opensearch.action.search.CreatePitAction; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; @@ -46,6 +50,7 @@ import org.opensearch.search.Scroll; import org.opensearch.search.SearchException; import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -86,7 +91,12 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe client().admin() .indices() .prepareCreate("test") - .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) + .setSettings( + Settings.builder() + .put("number_of_shards", numberOfShards) + .put("index.max_slices_per_scroll", 10000) + .put("index.max_slices_per_pit", 10000) + ) .setMapping(mapping) ); 
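The search_after test above is the core pagination story for PIT: the sort values of the last hit are fed back as the cursor while the PIT id keeps the view of the index frozen, so concurrent writes do not show up mid-scan. A compact sketch of that loop, using only calls that appear in these integration tests and assuming the same in-cluster `client()`; the keep alive, page size and sort field are arbitrary:

```java
// Deep pagination with PIT + search_after: the PIT pins a consistent snapshot,
// search_after carries the cursor, and new writes stay invisible until a new PIT is created.
CreatePitRequest pitRequest = new CreatePitRequest(TimeValue.timeValueMinutes(5), true);
pitRequest.setIndices(new String[] { "test" });
String pitId = client().execute(CreatePitAction.INSTANCE, pitRequest).actionGet().getId();

Object[] cursor = null;
while (true) {
    SearchRequestBuilder page = client().prepareSearch()
        .setPointInTime(new PointInTimeBuilder(pitId))
        .setSize(100)
        .addSort("field1", SortOrder.ASC);
    if (cursor != null) {
        page.searchAfter(cursor);
    }
    SearchHit[] hits = page.get().getHits().getHits();
    if (hits.length == 0) {
        break;                                   // exhausted the PIT's snapshot
    }
    cursor = hits[hits.length - 1].getSortValues();
}
```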
ensureGreen(); @@ -129,6 +139,78 @@ public void testSearchSort() throws Exception { } } + public void testSearchSortWithoutPitOrScroll() throws Exception { + int numShards = randomIntBetween(1, 7); + int numDocs = randomIntBetween(100, 1000); + setupIndex(numDocs, numShards); + int fetchSize = randomIntBetween(10, 100); + SearchRequestBuilder request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setSize(fetchSize) + .addSort(SortBuilders.fieldSort("_doc")); + SliceBuilder sliceBuilder = new SliceBuilder("_id", 0, 4); + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, () -> request.slice(sliceBuilder).get()); + assertTrue(ex.getMessage().contains("all shards failed")); + } + + public void testSearchSortWithPIT() throws Exception { + int numShards = randomIntBetween(1, 7); + int numDocs = randomIntBetween(100, 1000); + setupIndex(numDocs, numShards); + int max = randomIntBetween(2, numShards * 3); + CreatePitRequest pitRequest = new CreatePitRequest(TimeValue.timeValueDays(1), true); + pitRequest.setIndices(new String[] { "test" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, pitRequest); + CreatePitResponse pitResponse = execute.get(); + for (String field : new String[] { "_id", "random_int", "static_int" }) { + int fetchSize = randomIntBetween(10, 100); + + // test _doc sort + SearchRequestBuilder request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(fetchSize) + .addSort(SortBuilders.fieldSort("_doc")); + assertSearchSlicesWithPIT(request, field, max, numDocs); + + // test numeric sort + request = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId())) + .setSize(fetchSize) + .addSort(SortBuilders.fieldSort("random_int")); + assertSearchSlicesWithPIT(request, field, max, numDocs); + } + client().admin().indices().prepareDelete("test").get(); + } + + private void assertSearchSlicesWithPIT(SearchRequestBuilder request, String field, int numSlice, int numDocs) { + int totalResults = 0; + List keys = new ArrayList<>(); + for (int id = 0; id < numSlice; id++) { + SliceBuilder sliceBuilder = new SliceBuilder(field, id, numSlice); + SearchResponse searchResponse = request.slice(sliceBuilder).setFrom(0).get(); + totalResults += searchResponse.getHits().getHits().length; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int numSliceResults = searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + while (searchResponse.getHits().getHits().length > 0) { + searchResponse = request.setFrom(numSliceResults).slice(sliceBuilder).get(); + totalResults += searchResponse.getHits().getHits().length; + numSliceResults += searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + } + assertThat(totalResults, equalTo(numDocs)); + assertThat(keys.size(), equalTo(numDocs)); + assertThat(new HashSet(keys).size(), equalTo(numDocs)); + } + public void testWithPreferenceAndRoutings() throws Exception { int numShards = 10; int totalDocs = randomIntBetween(100, 1000); @@ -217,7 +299,7 @@ public void testInvalidQuery() throws Exception { ); Throwable rootCause = findRootCause(exc); assertThat(rootCause.getClass(), 
equalTo(SearchException.class)); - assertThat(rootCause.getMessage(), equalTo("`slice` cannot be used outside of a scroll context")); + assertThat(rootCause.getMessage(), equalTo("`slice` cannot be used outside of a scroll context or PIT context")); } private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String field, int numSlice, int numDocs) { diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java index 790f8f6cbdc36..36b7aeea3d262 100644 --- a/server/src/main/java/org/opensearch/action/ActionModule.java +++ b/server/src/main/java/org/opensearch/action/ActionModule.java @@ -232,10 +232,12 @@ import org.opensearch.action.main.MainAction; import org.opensearch.action.main.TransportMainAction; import org.opensearch.action.search.ClearScrollAction; +import org.opensearch.action.search.CreatePitAction; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchScrollAction; import org.opensearch.action.search.TransportClearScrollAction; +import org.opensearch.action.search.TransportCreatePitAction; import org.opensearch.action.search.TransportMultiSearchAction; import org.opensearch.action.search.TransportSearchAction; import org.opensearch.action.search.TransportSearchScrollAction; @@ -396,6 +398,7 @@ import org.opensearch.rest.action.ingest.RestSimulatePipelineAction; import org.opensearch.rest.action.search.RestClearScrollAction; import org.opensearch.rest.action.search.RestCountAction; +import org.opensearch.rest.action.search.RestCreatePitAction; import org.opensearch.rest.action.search.RestExplainAction; import org.opensearch.rest.action.search.RestMultiSearchAction; import org.opensearch.rest.action.search.RestSearchAction; @@ -656,6 +659,7 @@ public void reg actions.register(ImportDanglingIndexAction.INSTANCE, TransportImportDanglingIndexAction.class); actions.register(DeleteDanglingIndexAction.INSTANCE, TransportDeleteDanglingIndexAction.class); actions.register(FindDanglingIndexAction.INSTANCE, TransportFindDanglingIndexAction.class); + actions.register(CreatePitAction.INSTANCE, TransportCreatePitAction.class); return unmodifiableMap(actions.getRegistry()); } @@ -828,6 +832,9 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestRepositoriesAction()); registerHandler.accept(new RestSnapshotAction()); registerHandler.accept(new RestTemplatesAction()); + + // Point in time API + registerHandler.accept(new RestCreatePitAction()); for (ActionPlugin plugin : actionPlugins) { for (RestHandler handler : plugin.getRestHandlers( settings, diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitAction.java b/server/src/main/java/org/opensearch/action/search/CreatePitAction.java new file mode 100644 index 0000000000000..1af56a044205b --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/CreatePitAction.java @@ -0,0 +1,23 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionType; + +/** + * Action type for creating PIT reader context + */ +public class CreatePitAction extends ActionType { + public static final CreatePitAction INSTANCE = new CreatePitAction(); + public static final String NAME = "indices:data/read/point_in_time"; + + private CreatePitAction() { + super(NAME, CreatePitResponse::new); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitController.java b/server/src/main/java/org/opensearch/action/search/CreatePitController.java new file mode 100644 index 0000000000000..3d2ecc8b695c6 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/CreatePitController.java @@ -0,0 +1,273 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.OpenSearchException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.GroupedActionListener; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.index.shard.ShardId; +import org.opensearch.search.SearchPhaseResult; +import org.opensearch.search.SearchShardTarget; +import org.opensearch.tasks.Task; +import org.opensearch.transport.Transport; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +import static org.opensearch.common.unit.TimeValue.timeValueSeconds; + +/** + * Controller for creating PIT reader context + * Phase 1 of create PIT request : Create PIT reader contexts in the associated shards with a temporary keep alive + * Phase 2 of create PIT : Update PIT reader context with PIT ID and keep alive from request and + * fail user request if any of the updates in this phase are failed - we clean up PITs in case of such failures. + * This two phase approach is used to save PIT ID as part of context which is later used for other use cases like list PIT etc. 
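A compact sketch of how a caller drives the two phases described in this comment, assuming `controller` is an already constructed CreatePitController and `listener` is the caller's ActionListener for the final CreatePitResponse; the actual transport action may differ in detail:

```java
// Phase 1 completes the StepListener with the SearchResponse that carries the freshly
// created reader contexts (held with the short PIT_INIT_KEEP_ALIVE). Phase 2 then stamps
// the final PIT id and the user's keep alive into every context before the caller's
// listener sees a CreatePitResponse; phase-2 failures clean up the created contexts internally.
StepListener<SearchResponse> createPitListener = new StepListener<>();
ActionListener<CreatePitResponse> updatePitIdListener = ActionListener.wrap(
    response -> listener.onResponse(response),   // both phases succeeded
    e -> listener.onFailure(e)                   // create or update phase failed
);
controller.executeCreatePit(createPitListener, updatePitIdListener);
```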
+ */ +public class CreatePitController { + private final SearchTransportService searchTransportService; + private final ClusterService clusterService; + private final TransportSearchAction transportSearchAction; + private final NamedWriteableRegistry namedWriteableRegistry; + private final Task task; + private final ActionListener listener; + private final CreatePitRequest request; + private static final Logger logger = LogManager.getLogger(CreatePitController.class); + public static final Setting PIT_INIT_KEEP_ALIVE = Setting.positiveTimeSetting( + "pit.init.keep_alive", + timeValueSeconds(30), + Setting.Property.NodeScope + ); + + public CreatePitController( + CreatePitRequest request, + SearchTransportService searchTransportService, + ClusterService clusterService, + TransportSearchAction transportSearchAction, + NamedWriteableRegistry namedWriteableRegistry, + Task task, + ActionListener listener + ) { + this.searchTransportService = searchTransportService; + this.clusterService = clusterService; + this.transportSearchAction = transportSearchAction; + this.namedWriteableRegistry = namedWriteableRegistry; + this.task = task; + this.listener = listener; + this.request = request; + } + + /** + * This method creates PIT reader context + */ + public void executeCreatePit(StepListener createPitListener, ActionListener updatePitIdListener) { + SearchRequest searchRequest = new SearchRequest(request.getIndices()); + searchRequest.preference(request.getPreference()); + searchRequest.routing(request.getRouting()); + searchRequest.indicesOptions(request.getIndicesOptions()); + searchRequest.allowPartialSearchResults(request.shouldAllowPartialPitCreation()); + SearchTask searchTask = searchRequest.createTask( + task.getId(), + task.getType(), + task.getAction(), + task.getParentTaskId(), + Collections.emptyMap() + ); + /** + * Phase 1 of create PIT + */ + executeCreatePit(searchTask, searchRequest, createPitListener); + + /** + * Phase 2 of create PIT where we update pit id in pit contexts + */ + createPitListener.whenComplete( + searchResponse -> { executeUpdatePitId(request, searchRequest, searchResponse, updatePitIdListener); }, + updatePitIdListener::onFailure + ); + } + + /** + * Creates PIT reader context with temporary keep alive + */ + void executeCreatePit(Task task, SearchRequest searchRequest, StepListener createPitListener) { + logger.debug( + () -> new ParameterizedMessage("Executing creation of PIT context for indices [{}]", Arrays.toString(searchRequest.indices())) + ); + transportSearchAction.executeRequest( + task, + searchRequest, + TransportCreatePitAction.CREATE_PIT_ACTION, + true, + new TransportSearchAction.SinglePhaseSearchAction() { + @Override + public void executeOnShardTarget( + SearchTask searchTask, + SearchShardTarget target, + Transport.Connection connection, + ActionListener searchPhaseResultActionListener + ) { + searchTransportService.createPitContext( + connection, + new TransportCreatePitAction.CreateReaderContextRequest( + target.getShardId(), + PIT_INIT_KEEP_ALIVE.get(clusterService.getSettings()) + ), + searchTask, + ActionListener.wrap(r -> searchPhaseResultActionListener.onResponse(r), searchPhaseResultActionListener::onFailure) + ); + } + }, + createPitListener + ); + } + + /** + * Updates PIT ID, keep alive and createdTime of PIT reader context + */ + void executeUpdatePitId( + CreatePitRequest request, + SearchRequest searchRequest, + SearchResponse searchResponse, + ActionListener updatePitIdListener + ) { + logger.debug( + () -> new 
ParameterizedMessage( + "Updating PIT context with PIT ID [{}], creation time and keep alive", + searchResponse.pointInTimeId() + ) + ); + /** + * store the create time ( same create time for all PIT contexts across shards ) to be used + * for list PIT api + */ + final long relativeStartNanos = System.nanoTime(); + final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider( + searchRequest.getOrCreateAbsoluteStartMillis(), + relativeStartNanos, + System::nanoTime + ); + final long creationTime = timeProvider.getAbsoluteStartMillis(); + CreatePitResponse createPITResponse = new CreatePitResponse( + searchResponse.pointInTimeId(), + creationTime, + searchResponse.getTotalShards(), + searchResponse.getSuccessfulShards(), + searchResponse.getSkippedShards(), + searchResponse.getFailedShards(), + searchResponse.getShardFailures() + ); + SearchContextId contextId = SearchContextId.decode(namedWriteableRegistry, createPITResponse.getId()); + final StepListener> lookupListener = getConnectionLookupListener(contextId); + lookupListener.whenComplete(nodelookup -> { + final ActionListener groupedActionListener = getGroupedListener( + updatePitIdListener, + createPITResponse, + contextId.shards().size(), + contextId.shards().values() + ); + for (Map.Entry entry : contextId.shards().entrySet()) { + DiscoveryNode node = nodelookup.apply(entry.getValue().getClusterAlias(), entry.getValue().getNode()); + try { + final Transport.Connection connection = searchTransportService.getConnection(entry.getValue().getClusterAlias(), node); + searchTransportService.updatePitContext( + connection, + new UpdatePitContextRequest( + entry.getValue().getSearchContextId(), + createPITResponse.getId(), + request.getKeepAlive().millis(), + creationTime + ), + groupedActionListener + ); + } catch (Exception e) { + logger.error( + () -> new ParameterizedMessage( + "Create pit update phase failed for PIT ID [{}] on node [{}]", + searchResponse.pointInTimeId(), + node + ), + e + ); + groupedActionListener.onFailure( + new OpenSearchException( + "Create pit update phase for PIT ID [" + searchResponse.pointInTimeId() + "] failed on node[" + node + "]", + e + ) + ); + } + } + }, updatePitIdListener::onFailure); + } + + private StepListener> getConnectionLookupListener(SearchContextId contextId) { + ClusterState state = clusterService.state(); + final Set clusters = contextId.shards() + .values() + .stream() + .filter(ctx -> Strings.isEmpty(ctx.getClusterAlias()) == false) + .map(SearchContextIdForNode::getClusterAlias) + .collect(Collectors.toSet()); + return SearchUtils.getConnectionLookupListener(searchTransportService.getRemoteClusterService(), state, clusters); + } + + private ActionListener getGroupedListener( + ActionListener updatePitIdListener, + CreatePitResponse createPITResponse, + int size, + Collection contexts + ) { + return new GroupedActionListener<>(new ActionListener<>() { + @Override + public void onResponse(final Collection responses) { + updatePitIdListener.onResponse(createPITResponse); + } + + @Override + public void onFailure(final Exception e) { + cleanupContexts(contexts); + updatePitIdListener.onFailure(e); + } + }, size); + } + + /** + * Cleanup all created PIT contexts in case of failure + */ + private void cleanupContexts(Collection contexts) { + ActionListener deleteListener = new ActionListener<>() { + @Override + public void onResponse(Integer freed) { + // log the number of freed contexts - this is invoke and forget call + logger.debug(() -> new 
ParameterizedMessage("Cleaned up {} contexts out of {}", freed, contexts.size())); + } + + @Override + public void onFailure(Exception e) { + logger.error("Cleaning up PIT contexts failed ", e); + } + }; + ClearScrollController.closeContexts(clusterService.state().getNodes(), searchTransportService, contexts, deleteListener); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitRequest.java b/server/src/main/java/org/opensearch/action/search/CreatePitRequest.java new file mode 100644 index 0000000000000..45d6d9e2c9f54 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/CreatePitRequest.java @@ -0,0 +1,195 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.IndicesRequest; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.common.Nullable; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.ToXContent; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskId; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.opensearch.action.ValidateActions.addValidationError; + +/** + * A request to make create point in time against one or more indices. + */ +public class CreatePitRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContent { + + // keep alive for pit reader context + private TimeValue keepAlive; + + // this describes whether PIT can be created with partial failures + private Boolean allowPartialPitCreation; + @Nullable + private String routing = null; + @Nullable + private String preference = null; + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = SearchRequest.DEFAULT_INDICES_OPTIONS; + + public CreatePitRequest(TimeValue keepAlive, Boolean allowPartialPitCreation, String... 
indices) { + this.keepAlive = keepAlive; + this.allowPartialPitCreation = allowPartialPitCreation; + this.indices = indices; + } + + public CreatePitRequest(StreamInput in) throws IOException { + super(in); + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + routing = in.readOptionalString(); + preference = in.readOptionalString(); + keepAlive = in.readTimeValue(); + allowPartialPitCreation = in.readOptionalBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + out.writeOptionalString(routing); + out.writeOptionalString(preference); + out.writeTimeValue(keepAlive); + out.writeOptionalBoolean(allowPartialPitCreation); + } + + public String getRouting() { + return routing; + } + + public String getPreference() { + return preference; + } + + public String[] getIndices() { + return indices; + } + + public IndicesOptions getIndicesOptions() { + return indicesOptions; + } + + public TimeValue getKeepAlive() { + return keepAlive; + } + + /** + * Sets if this request should allow partial results. + */ + public void allowPartialPitCreation(Boolean allowPartialPitCreation) { + this.allowPartialPitCreation = allowPartialPitCreation; + } + + public boolean shouldAllowPartialPitCreation() { + return allowPartialPitCreation; + } + + public void setRouting(String routing) { + this.routing = routing; + } + + public void setPreference(String preference) { + this.preference = preference; + } + + public void setIndices(String[] indices) { + this.indices = indices; + } + + public void setIndicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = Objects.requireNonNull(indicesOptions, "indicesOptions must not be null"); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (keepAlive == null) { + validationException = addValidationError("keep alive not specified", validationException); + } + return validationException; + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public CreatePitRequest indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = Objects.requireNonNull(indicesOptions, "indicesOptions must not be null"); + return this; + } + + public void setKeepAlive(TimeValue keepAlive) { + this.keepAlive = keepAlive; + } + + public final String buildDescription() { + StringBuilder sb = new StringBuilder(); + sb.append("indices["); + Strings.arrayToDelimitedString(indices, ",", sb); + sb.append("], "); + sb.append("pointintime[").append(keepAlive).append("], "); + sb.append("allowPartialPitCreation[").append(allowPartialPitCreation).append("], "); + return sb.toString(); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new Task(id, type, action, this.buildDescription(), parentTaskId, headers); + } + + private void validateIndices(String... indices) { + Objects.requireNonNull(indices, "indices must not be null"); + for (String index : indices) { + Objects.requireNonNull(index, "index must not be null"); + } + } + + @Override + public CreatePitRequest indices(String... 
indices) { + validateIndices(indices); + this.indices = indices; + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("keep_alive", keepAlive); + builder.field("allow_partial_pit_creation", allowPartialPitCreation); + if (indices != null) { + builder.startArray("indices"); + for (String index : indices) { + builder.value(index); + } + builder.endArray(); + } + if (indicesOptions != null) { + indicesOptions.toXContent(builder, params); + } + return builder; + } +} diff --git a/server/src/main/java/org/opensearch/action/search/CreatePitResponse.java b/server/src/main/java/org/opensearch/action/search/CreatePitResponse.java new file mode 100644 index 0000000000000..25eb9aff9e3d7 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/CreatePitResponse.java @@ -0,0 +1,232 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionResponse; +import org.opensearch.common.ParseField; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.StatusToXContentObject; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.rest.RestStatus; +import org.opensearch.rest.action.RestActions; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +/** + * Create point in time response with point in time id and shard success / failures + */ +public class CreatePitResponse extends ActionResponse implements StatusToXContentObject { + private static final ParseField ID = new ParseField("id"); + private static final ParseField CREATION_TIME = new ParseField("creation_time"); + + // point in time id + private final String id; + private final int totalShards; + private final int successfulShards; + private final int failedShards; + private final int skippedShards; + private final ShardSearchFailure[] shardFailures; + private final long creationTime; + + public CreatePitResponse(StreamInput in) throws IOException { + super(in); + id = in.readString(); + totalShards = in.readVInt(); + successfulShards = in.readVInt(); + failedShards = in.readVInt(); + skippedShards = in.readVInt(); + creationTime = in.readLong(); + int size = in.readVInt(); + if (size == 0) { + shardFailures = ShardSearchFailure.EMPTY_ARRAY; + } else { + shardFailures = new ShardSearchFailure[size]; + for (int i = 0; i < shardFailures.length; i++) { + shardFailures[i] = ShardSearchFailure.readShardSearchFailure(in); + } + } + } + + public CreatePitResponse( + String id, + long creationTime, + int totalShards, + int successfulShards, + int skippedShards, + int failedShards, + ShardSearchFailure[] shardFailures + ) { + this.id = id; + this.creationTime = creationTime; + this.totalShards = totalShards; + this.successfulShards = successfulShards; + this.skippedShards = skippedShards; + this.failedShards = failedShards; + this.shardFailures = shardFailures; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ID.getPreferredName(), id); 
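+ // Illustrative response shape (sketch): {"id": "<pit id>", "_shards": {...}, "creation_time": <epoch millis>}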
+ RestActions.buildBroadcastShardsHeader( + builder, + params, + getTotalShards(), + getSuccessfulShards(), + getSkippedShards(), + getFailedShards(), + getShardFailures() + ); + builder.field(CREATION_TIME.getPreferredName(), creationTime); + builder.endObject(); + return builder; + } + + /** + * Parse the create PIT response body into a new {@link CreatePitResponse} object + */ + public static CreatePitResponse fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + parser.nextToken(); + return innerFromXContent(parser); + } + + public static CreatePitResponse innerFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String currentFieldName = parser.currentName(); + int successfulShards = -1; + int totalShards = -1; + int skippedShards = 0; + int failedShards = 0; + String id = null; + long creationTime = 0; + List failures = new ArrayList<>(); + for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (CREATION_TIME.match(currentFieldName, parser.getDeprecationHandler())) { + creationTime = parser.longValue(); + } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { + id = parser.text(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (RestActions._SHARDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (RestActions.FAILED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + failedShards = parser.intValue(); // we don't need it but need to consume it + } else if (RestActions.SUCCESSFUL_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + successfulShards = parser.intValue(); + } else if (RestActions.TOTAL_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + totalShards = parser.intValue(); + } else if (RestActions.SKIPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + skippedShards = parser.intValue(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (RestActions.FAILURES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + failures.add(ShardSearchFailure.fromXContent(parser)); + } + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + } else { + parser.skipChildren(); + } + } + } + + return new CreatePitResponse( + id, + creationTime, + totalShards, + successfulShards, + skippedShards, + failedShards, + failures.toArray(ShardSearchFailure.EMPTY_ARRAY) + ); + } + + public long getCreationTime() { + return creationTime; + } + + /** + * The failed number of shards the search was executed on. + */ + public int getFailedShards() { + return shardFailures.length; + } + + /** + * The failures that occurred during the search. 
+ */ + public ShardSearchFailure[] getShardFailures() { + return this.shardFailures; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeVInt(totalShards); + out.writeVInt(successfulShards); + out.writeVInt(failedShards); + out.writeVInt(skippedShards); + out.writeLong(creationTime); + out.writeVInt(shardFailures.length); + for (ShardSearchFailure shardSearchFailure : shardFailures) { + shardSearchFailure.writeTo(out); + } + } + + public String getId() { + return id; + } + + /** + * The total number of shards the create pit operation was executed on. + */ + public int getTotalShards() { + return totalShards; + } + + /** + * The successful number of shards the create pit operation was executed on. + */ + public int getSuccessfulShards() { + return successfulShards; + } + + public int getSkippedShards() { + return skippedShards; + } + + @Override + public RestStatus status() { + return RestStatus.status(successfulShards, totalShards, shardFailures); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchContextId.java b/server/src/main/java/org/opensearch/action/search/SearchContextId.java index c2bb46a7b0e57..8a9cf1dc9772d 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchContextId.java +++ b/server/src/main/java/org/opensearch/action/search/SearchContextId.java @@ -116,7 +116,7 @@ public static SearchContextId decode(NamedWriteableRegistry namedWriteableRegist } return new SearchContextId(Collections.unmodifiableMap(shards), Collections.unmodifiableMap(aliasFilters)); } catch (IOException e) { - throw new IllegalArgumentException(e); + throw new IllegalArgumentException("invalid id: [" + id + "]", e); } } diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java index f91276960397a..f41bd7938b3b6 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java @@ -95,6 +95,8 @@ public class SearchTransportService { public static final String FETCH_ID_SCROLL_ACTION_NAME = "indices:data/read/search[phase/fetch/id/scroll]"; public static final String FETCH_ID_ACTION_NAME = "indices:data/read/search[phase/fetch/id]"; public static final String QUERY_CAN_MATCH_NAME = "indices:data/read/search[can_match]"; + public static final String CREATE_READER_CONTEXT_ACTION_NAME = "indices:data/read/search[create_context]"; + public static final String UPDATE_READER_CONTEXT_ACTION_NAME = "indices:data/read/search[update_context]"; private final TransportService transportService; private final BiFunction responseWrapper; @@ -142,6 +144,36 @@ public void sendFreeContext( ); } + public void updatePitContext( + Transport.Connection connection, + UpdatePitContextRequest request, + ActionListener actionListener + ) { + transportService.sendRequest( + connection, + UPDATE_READER_CONTEXT_ACTION_NAME, + request, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(actionListener, UpdatePitContextResponse::new) + ); + } + + public void createPitContext( + Transport.Connection connection, + TransportCreatePitAction.CreateReaderContextRequest request, + SearchTask task, + ActionListener actionListener + ) { + transportService.sendChildRequest( + connection, + CREATE_READER_CONTEXT_ACTION_NAME, + request, + task, + TransportRequestOptions.EMPTY, + new 
ActionListenerResponseHandler<>(actionListener, TransportCreatePitAction.CreateReaderContextResponse::new) + ); + } + public void sendCanMatch( Transport.Connection connection, final ShardSearchRequest request, @@ -562,6 +594,49 @@ public static void registerRequestHandler(TransportService transportService, Sea } ); TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, SearchService.CanMatchResponse::new); + transportService.registerRequestHandler( + CREATE_READER_CONTEXT_ACTION_NAME, + ThreadPool.Names.SAME, + TransportCreatePitAction.CreateReaderContextRequest::new, + (request, channel, task) -> { + ChannelActionListener< + TransportCreatePitAction.CreateReaderContextResponse, + TransportCreatePitAction.CreateReaderContextRequest> listener = new ChannelActionListener<>( + channel, + CREATE_READER_CONTEXT_ACTION_NAME, + request + ); + searchService.createPitReaderContext( + request.getShardId(), + request.getKeepAlive(), + ActionListener.wrap( + r -> listener.onResponse(new TransportCreatePitAction.CreateReaderContextResponse(r)), + listener::onFailure + ) + ); + } + ); + TransportActionProxy.registerProxyAction( + transportService, + CREATE_READER_CONTEXT_ACTION_NAME, + TransportCreatePitAction.CreateReaderContextResponse::new + ); + + transportService.registerRequestHandler( + UPDATE_READER_CONTEXT_ACTION_NAME, + ThreadPool.Names.SAME, + UpdatePitContextRequest::new, + (request, channel, task) -> { + ChannelActionListener listener = new ChannelActionListener<>( + channel, + UPDATE_READER_CONTEXT_ACTION_NAME, + request + ); + searchService.updatePitIdAndKeepAlive(request, listener); + } + ); + TransportActionProxy.registerProxyAction(transportService, UPDATE_READER_CONTEXT_ACTION_NAME, UpdatePitContextResponse::new); + } /** diff --git a/server/src/main/java/org/opensearch/action/search/SearchUtils.java b/server/src/main/java/org/opensearch/action/search/SearchUtils.java new file mode 100644 index 0000000000000..148d1645568b1 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/SearchUtils.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.StepListener; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.transport.RemoteClusterService; + +import java.util.Set; +import java.util.function.BiFunction; + +/** + * Helper class for common search functions + */ +public class SearchUtils { + + public SearchUtils() {} + + /** + * Get connection lookup listener for list of clusters passed + */ + public static StepListener> getConnectionLookupListener( + RemoteClusterService remoteClusterService, + ClusterState state, + Set clusters + ) { + final StepListener> lookupListener = new StepListener<>(); + + if (clusters.isEmpty()) { + lookupListener.onResponse((cluster, nodeId) -> state.getNodes().get(nodeId)); + } else { + remoteClusterService.collectNodes(clusters, lookupListener); + } + return lookupListener; + } +} diff --git a/server/src/main/java/org/opensearch/action/search/TransportCreatePitAction.java b/server/src/main/java/org/opensearch/action/search/TransportCreatePitAction.java new file mode 100644 index 0000000000000..3ec821dbed9c4 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/TransportCreatePitAction.java @@ -0,0 +1,139 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.action.ActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.index.shard.ShardId; +import org.opensearch.search.SearchPhaseResult; +import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.tasks.Task; +import org.opensearch.transport.TransportRequest; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.util.Arrays; + +/** + * Transport action for creating PIT reader context + */ +public class TransportCreatePitAction extends HandledTransportAction { + + public static final String CREATE_PIT_ACTION = "create_pit"; + private final TransportService transportService; + private final SearchTransportService searchTransportService; + private final ClusterService clusterService; + private final TransportSearchAction transportSearchAction; + private final NamedWriteableRegistry namedWriteableRegistry; + + @Inject + public TransportCreatePitAction( + TransportService transportService, + ActionFilters actionFilters, + SearchTransportService searchTransportService, + ClusterService clusterService, + TransportSearchAction transportSearchAction, + NamedWriteableRegistry namedWriteableRegistry + ) { + super(CreatePitAction.NAME, transportService, actionFilters, in -> new CreatePitRequest(in)); + this.transportService = transportService; + this.searchTransportService = searchTransportService; + this.clusterService = clusterService; + this.transportSearchAction = transportSearchAction; + 
this.namedWriteableRegistry = namedWriteableRegistry; + } + + @Override + protected void doExecute(Task task, CreatePitRequest request, ActionListener listener) { + CreatePitController controller = new CreatePitController( + request, + searchTransportService, + clusterService, + transportSearchAction, + namedWriteableRegistry, + task, + listener + ); + final StepListener createPitListener = new StepListener<>(); + final ActionListener updatePitIdListener = ActionListener.wrap(r -> listener.onResponse(r), e -> { + logger.error( + () -> new ParameterizedMessage( + "PIT creation failed while updating PIT ID for indices [{}]", + Arrays.toString(request.indices()) + ) + ); + listener.onFailure(e); + }); + controller.executeCreatePit(createPitListener, updatePitIdListener); + } + + /** + * Request to create pit reader context with keep alive + */ + public static class CreateReaderContextRequest extends TransportRequest { + private final ShardId shardId; + private final TimeValue keepAlive; + + public CreateReaderContextRequest(ShardId shardId, TimeValue keepAlive) { + this.shardId = shardId; + this.keepAlive = keepAlive; + } + + public ShardId getShardId() { + return shardId; + } + + public TimeValue getKeepAlive() { + return keepAlive; + } + + public CreateReaderContextRequest(StreamInput in) throws IOException { + super(in); + this.shardId = new ShardId(in); + this.keepAlive = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + shardId.writeTo(out); + out.writeTimeValue(keepAlive); + } + } + + /** + * Create pit reader context response which holds the contextId + */ + public static class CreateReaderContextResponse extends SearchPhaseResult { + public CreateReaderContextResponse(ShardSearchContextId shardSearchContextId) { + this.contextId = shardSearchContextId; + } + + public CreateReaderContextResponse(StreamInput in) throws IOException { + super(in); + contextId = new ShardSearchContextId(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + contextId.writeTo(out); + } + } + +} diff --git a/server/src/main/java/org/opensearch/action/search/UpdatePitContextRequest.java b/server/src/main/java/org/opensearch/action/search/UpdatePitContextRequest.java new file mode 100644 index 0000000000000..e6c9befb7938f --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/UpdatePitContextRequest.java @@ -0,0 +1,67 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.transport.TransportRequest; + +import java.io.IOException; + +/** + * Request used to update PIT reader contexts with pitId, keepAlive and creationTime + */ +public class UpdatePitContextRequest extends TransportRequest { + private final String pitId; + private final long keepAlive; + + private final long creationTime; + private final ShardSearchContextId searchContextId; + + public UpdatePitContextRequest(ShardSearchContextId searchContextId, String pitId, long keepAlive, long creationTime) { + this.pitId = pitId; + this.searchContextId = searchContextId; + this.keepAlive = keepAlive; + this.creationTime = creationTime; + } + + UpdatePitContextRequest(StreamInput in) throws IOException { + super(in); + pitId = in.readString(); + keepAlive = in.readLong(); + creationTime = in.readLong(); + searchContextId = new ShardSearchContextId(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(pitId); + out.writeLong(keepAlive); + out.writeLong(creationTime); + searchContextId.writeTo(out); + } + + public ShardSearchContextId getSearchContextId() { + return searchContextId; + } + + public String getPitId() { + return pitId; + } + + public long getCreationTime() { + return creationTime; + } + + public long getKeepAlive() { + return keepAlive; + } +} diff --git a/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java b/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java new file mode 100644 index 0000000000000..919dd87ea3041 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java @@ -0,0 +1,58 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.transport.TransportResponse; + +import java.io.IOException; + +/** + * Update PIT context response with creation time, keep alive etc. 
+ */ +public class UpdatePitContextResponse extends TransportResponse { + private final String pitId; + + private final long creationTime; + + private final long keepAlive; + + UpdatePitContextResponse(StreamInput in) throws IOException { + super(in); + pitId = in.readString(); + creationTime = in.readLong(); + keepAlive = in.readLong(); + } + + public UpdatePitContextResponse(String pitId, long creationTime, long keepAlive) { + this.pitId = pitId; + this.keepAlive = keepAlive; + this.creationTime = creationTime; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(pitId); + out.writeLong(creationTime); + out.writeLong(keepAlive); + } + + public String getPitId() { + return pitId; + } + + public long getKeepAlive() { + return keepAlive; + } + + public long getCreationTime() { + return creationTime; + } +} diff --git a/server/src/main/java/org/opensearch/client/Client.java b/server/src/main/java/org/opensearch/client/Client.java index 50f8f52253815..a73f8200ab277 100644 --- a/server/src/main/java/org/opensearch/client/Client.java +++ b/server/src/main/java/org/opensearch/client/Client.java @@ -58,6 +58,8 @@ import org.opensearch.action.search.ClearScrollRequest; import org.opensearch.action.search.ClearScrollRequestBuilder; import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchRequestBuilder; import org.opensearch.action.search.MultiSearchResponse; @@ -325,6 +327,11 @@ public interface Client extends OpenSearchClient, Releasable { */ SearchScrollRequestBuilder prepareSearchScroll(String scrollId); + /** + * Create point in time for one or more indices + */ + void createPit(CreatePitRequest createPITRequest, ActionListener listener); + /** * Performs multiple search requests. 
*/ diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 4fdf4b1166bd6..6cc0827310bd1 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -324,6 +324,9 @@ import org.opensearch.action.search.ClearScrollRequest; import org.opensearch.action.search.ClearScrollRequestBuilder; import org.opensearch.action.search.ClearScrollResponse; +import org.opensearch.action.search.CreatePitAction; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchRequestBuilder; @@ -574,6 +577,11 @@ public SearchScrollRequestBuilder prepareSearchScroll(String scrollId) { return new SearchScrollRequestBuilder(this, SearchScrollAction.INSTANCE, scrollId); } + @Override + public void createPit(final CreatePitRequest createPITRequest, final ActionListener listener) { + execute(CreatePitAction.INSTANCE, createPITRequest, listener); + } + @Override public ActionFuture multiSearch(MultiSearchRequest request) { return execute(MultiSearchAction.INSTANCE, request); diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index be92bf1643aee..1d0039c26670a 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -32,6 +32,7 @@ package org.opensearch.common.settings; import org.apache.logging.log4j.LogManager; +import org.opensearch.action.search.CreatePitController; import org.opensearch.cluster.routing.allocation.decider.NodeLoadAwareAllocationDecider; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; @@ -464,6 +465,9 @@ public void apply(Settings value, Settings current, Settings previous) { SearchService.KEEPALIVE_INTERVAL_SETTING, SearchService.MAX_KEEPALIVE_SETTING, SearchService.ALLOW_EXPENSIVE_QUERIES, + SearchService.MAX_OPEN_PIT_CONTEXT, + SearchService.MAX_PIT_KEEPALIVE_SETTING, + CreatePitController.PIT_INIT_KEEP_ALIVE, MultiBucketConsumerService.MAX_BUCKET_SETTING, SearchService.LOW_LEVEL_CANCELLATION_SETTING, SearchService.MAX_OPEN_SCROLL_CONTEXT, diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java index ba2666b53d7a8..3eb68a7686c96 100644 --- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java @@ -149,6 +149,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexSettings.INDEX_CHECK_ON_STARTUP, IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD, IndexSettings.MAX_SLICES_PER_SCROLL, + IndexSettings.MAX_SLICES_PER_PIT, IndexSettings.MAX_REGEX_LENGTH_SETTING, ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING, IndexSettings.INDEX_GC_DELETES_SETTING, diff --git a/server/src/main/java/org/opensearch/index/IndexSettings.java b/server/src/main/java/org/opensearch/index/IndexSettings.java index e40acb94ee498..2da9fc0c6d995 100644 --- 
a/server/src/main/java/org/opensearch/index/IndexSettings.java +++ b/server/src/main/java/org/opensearch/index/IndexSettings.java @@ -451,6 +451,17 @@ public final class IndexSettings { Property.IndexScope ); + /** + * The maximum number of slices allowed in a search request with PIT + */ + public static final Setting MAX_SLICES_PER_PIT = Setting.intSetting( + "index.max_slices_per_pit", + 1024, + 1, + Property.Dynamic, + Property.IndexScope + ); + /** * The maximum length of regex string allowed in a regexp query. */ @@ -603,6 +614,10 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { * The maximum number of slices allowed in a scroll request. */ private volatile int maxSlicesPerScroll; + /** + * The maximum number of slices allowed in a PIT request. + */ + private volatile int maxSlicesPerPit; /** * The maximum length of regex string allowed in a regexp query. @@ -717,6 +732,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti maxShingleDiff = scopedSettings.get(MAX_SHINGLE_DIFF_SETTING); maxRefreshListeners = scopedSettings.get(MAX_REFRESH_LISTENERS_PER_SHARD); maxSlicesPerScroll = scopedSettings.get(MAX_SLICES_PER_SCROLL); + maxSlicesPerPit = scopedSettings.get(MAX_SLICES_PER_PIT); maxAnalyzedOffset = scopedSettings.get(MAX_ANALYZED_OFFSET_SETTING); maxTermsCount = scopedSettings.get(MAX_TERMS_COUNT_SETTING); maxRegexLength = scopedSettings.get(MAX_REGEX_LENGTH_SETTING); @@ -789,6 +805,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti scopedSettings.addSettingsUpdateConsumer(MAX_ANALYZED_OFFSET_SETTING, this::setHighlightMaxAnalyzedOffset); scopedSettings.addSettingsUpdateConsumer(MAX_TERMS_COUNT_SETTING, this::setMaxTermsCount); scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_SCROLL, this::setMaxSlicesPerScroll); + scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_PIT, this::setMaxSlicesPerPit); scopedSettings.addSettingsUpdateConsumer(DEFAULT_FIELD_SETTING, this::setDefaultFields); scopedSettings.addSettingsUpdateConsumer(INDEX_SEARCH_IDLE_AFTER, this::setSearchIdleAfter); scopedSettings.addSettingsUpdateConsumer(MAX_REGEX_LENGTH_SETTING, this::setMaxRegexLength); @@ -1249,6 +1266,17 @@ public int getMaxSlicesPerScroll() { return maxSlicesPerScroll; } + /** + * The maximum number of slices allowed in a PIT request. + */ + public int getMaxSlicesPerPit() { + return maxSlicesPerPit; + } + + private void setMaxSlicesPerPit(int value) { + this.maxSlicesPerPit = value; + } + private void setMaxSlicesPerScroll(int value) { this.maxSlicesPerScroll = value; } diff --git a/server/src/main/java/org/opensearch/index/shard/SearchOperationListener.java b/server/src/main/java/org/opensearch/index/shard/SearchOperationListener.java index d3177055a5bd8..0a7c80f5e87d3 100644 --- a/server/src/main/java/org/opensearch/index/shard/SearchOperationListener.java +++ b/server/src/main/java/org/opensearch/index/shard/SearchOperationListener.java @@ -131,6 +131,19 @@ default void onFreeScrollContext(ReaderContext readerContext) {} */ default void validateReaderContext(ReaderContext readerContext, TransportRequest transportRequest) {} + /** + * Executed when a new Point-In-Time {@link ReaderContext} was created + * @param readerContext the created reader context + */ + default void onNewPitContext(ReaderContext readerContext) {} + + /** + * Executed when a Point-In-Time search {@link SearchContext} is freed. + * This happens on deletion of a Point-In-Time or on it's keep-alive is expiring. 
+ * @param readerContext the freed search context + */ + default void onFreePitContext(ReaderContext readerContext) {} + /** * A Composite listener that multiplexes calls to each of the listeners methods. */ @@ -265,5 +278,36 @@ public void validateReaderContext(ReaderContext readerContext, TransportRequest } ExceptionsHelper.reThrowIfNotNull(exception); } + + /** + * Executed when a new Point-In-Time {@link ReaderContext} was created + * @param readerContext the created reader context + */ + @Override + public void onNewPitContext(ReaderContext readerContext) { + for (SearchOperationListener listener : listeners) { + try { + listener.onNewPitContext(readerContext); + } catch (Exception e) { + logger.warn("onNewPitContext listener failed", e); + } + } + } + + /** + * Executed when a Point-In-Time search {@link SearchContext} is freed. + * This happens on deletion of a Point-In-Time or on it's keep-alive is expiring. + * @param readerContext the freed search context + */ + @Override + public void onFreePitContext(ReaderContext readerContext) { + for (SearchOperationListener listener : listeners) { + try { + listener.onFreePitContext(readerContext); + } catch (Exception e) { + logger.warn("onFreePitContext listener failed", e); + } + } + } } } diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestCreatePitAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestCreatePitAction.java new file mode 100644 index 0000000000000..9439670880015 --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/action/search/RestCreatePitAction.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.rest.action.search; + +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.Strings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestStatusToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static java.util.Arrays.asList; +import static java.util.Collections.unmodifiableList; +import static org.opensearch.rest.RestRequest.Method.POST; + +/** + * Rest action for creating PIT context + */ +public class RestCreatePitAction extends BaseRestHandler { + public static String ALLOW_PARTIAL_PIT_CREATION = "allow_partial_pit_creation"; + public static String KEEP_ALIVE = "keep_alive"; + + @Override + public String getName() { + return "create_pit_action"; + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + boolean allowPartialPitCreation = request.paramAsBoolean(ALLOW_PARTIAL_PIT_CREATION, true); + String[] indices = Strings.splitStringByCommaToArray(request.param("index")); + TimeValue keepAlive = request.paramAsTime(KEEP_ALIVE, null); + CreatePitRequest createPitRequest = new CreatePitRequest(keepAlive, allowPartialPitCreation, indices); + createPitRequest.setIndicesOptions(IndicesOptions.fromRequest(request, createPitRequest.indicesOptions())); + createPitRequest.setPreference(request.param("preference")); + createPitRequest.setRouting(request.param("routing")); + + return channel -> client.createPit(createPitRequest, new RestStatusToXContentListener<>(channel)); + } + + @Override + public List routes() { + return unmodifiableList(asList(new Route(POST, "/{index}/_search/point_in_time"))); + } + +} diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index d09143e3373b4..e6c9a6d0e37e9 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -75,6 +75,7 @@ import org.opensearch.search.fetch.subphase.ScriptFieldsContext; import org.opensearch.search.fetch.subphase.highlight.SearchHighlightContext; import org.opensearch.search.internal.ContextIndexSearcher; +import org.opensearch.search.internal.PitReaderContext; import org.opensearch.search.internal.ReaderContext; import org.opensearch.search.internal.ScrollContext; import org.opensearch.search.internal.SearchContext; @@ -287,7 +288,7 @@ public void preProcess(boolean rewrite) { } } - if (sliceBuilder != null) { + if (sliceBuilder != null && scrollContext() != null) { int sliceLimit = indexService.getIndexSettings().getMaxSlicesPerScroll(); int numSlices = sliceBuilder.getMax(); if (numSlices > sliceLimit) { @@ -304,6 +305,23 @@ public void preProcess(boolean rewrite) { } } + if (sliceBuilder != null && readerContext != null && readerContext instanceof PitReaderContext) { + int sliceLimit = indexService.getIndexSettings().getMaxSlicesPerPit(); + int numSlices = sliceBuilder.getMax(); + if (numSlices > sliceLimit) { + throw new IllegalArgumentException( + "The number of slices [" + + numSlices + + "] is too large. It must " + + "be less than [" + + sliceLimit + + "]. 
This limit can be set by changing the [" + + IndexSettings.MAX_SLICES_PER_PIT.getKey() + + "] index level setting." + ); + } + } + // initialize the filtering alias based on the provided filters try { final QueryBuilder queryBuilder = request.getAliasFilter().getQueryBuilder(); diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 3b24d52bebe53..0f7a39a31535e 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -44,6 +44,8 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchShardTask; import org.opensearch.action.search.SearchType; +import org.opensearch.action.search.UpdatePitContextRequest; +import org.opensearch.action.search.UpdatePitContextResponse; import org.opensearch.action.support.TransportActions; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.service.ClusterService; @@ -111,6 +113,7 @@ import org.opensearch.search.internal.AliasFilter; import org.opensearch.search.internal.InternalScrollSearchRequest; import org.opensearch.search.internal.LegacyReaderContext; +import org.opensearch.search.internal.PitReaderContext; import org.opensearch.search.internal.ReaderContext; import org.opensearch.search.internal.SearchContext; import org.opensearch.search.internal.ShardSearchContextId; @@ -172,6 +175,15 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv Property.NodeScope, Property.Dynamic ); + /** + * This setting will help validate the max keep alive that can be set during creation or extension for a PIT reader context + */ + public static final Setting MAX_PIT_KEEPALIVE_SETTING = Setting.positiveTimeSetting( + "pit.max_keep_alive", + timeValueHours(24), + Property.NodeScope, + Property.Dynamic + ); public static final Setting KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting( "search.keep_alive_interval", timeValueMinutes(1), @@ -218,6 +230,19 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv Property.NodeScope ); + /** + * This setting defines the maximum number of active PIT reader contexts in the node , since each PIT context + * has a resource cost attached to it. This setting is less than scroll since users are + * encouraged to share the PIT details. 
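+ * As the setting is dynamic, it can be adjusted at runtime; an illustrative cluster settings API request body
+ * would be {"persistent": {"search.max_open_pit_context": 500}} (the value 500 is only an example).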
+ */ + public static final Setting MAX_OPEN_PIT_CONTEXT = Setting.intSetting( + "search.max_open_pit_context", + 300, + 0, + Property.Dynamic, + Property.NodeScope + ); + public static final int DEFAULT_SIZE = 10; public static final int DEFAULT_FROM = 0; @@ -243,6 +268,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private volatile long maxKeepAlive; + private volatile long maxPitKeepAlive; + private volatile TimeValue defaultSearchTimeout; private volatile boolean defaultAllowPartialSearchResults; @@ -251,6 +278,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private volatile int maxOpenScrollContext; + private volatile int maxOpenPitContext; + private final Cancellable keepAliveReaper; private final AtomicLong idGenerator = new AtomicLong(); @@ -259,6 +288,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final MultiBucketConsumerService multiBucketConsumerService; + private final AtomicInteger openPitContexts = new AtomicInteger(); private final AtomicInteger openScrollContexts = new AtomicInteger(); private final String sessionId = UUIDs.randomBase64UUID(); private final Executor indexSearcherExecutor; @@ -293,9 +323,16 @@ public SearchService( TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings); setKeepAlives(DEFAULT_KEEPALIVE_SETTING.get(settings), MAX_KEEPALIVE_SETTING.get(settings)); - clusterService.getClusterSettings() .addSettingsUpdateConsumer(DEFAULT_KEEPALIVE_SETTING, MAX_KEEPALIVE_SETTING, this::setKeepAlives, this::validateKeepAlives); + setPitKeepAlives(DEFAULT_KEEPALIVE_SETTING.get(settings), MAX_PIT_KEEPALIVE_SETTING.get(settings)); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer( + DEFAULT_KEEPALIVE_SETTING, + MAX_PIT_KEEPALIVE_SETTING, + this::setPitKeepAlives, + this::validatePitKeepAlives + ); this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval, Names.SAME); @@ -309,6 +346,9 @@ public SearchService( maxOpenScrollContext = MAX_OPEN_SCROLL_CONTEXT.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_OPEN_SCROLL_CONTEXT, this::setMaxOpenScrollContext); + maxOpenPitContext = MAX_OPEN_PIT_CONTEXT.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_OPEN_PIT_CONTEXT, this::setMaxOpenPitContext); + lowLevelCancellation = LOW_LEVEL_CANCELLATION_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(LOW_LEVEL_CANCELLATION_SETTING, this::setLowLevelCancellation); } @@ -331,12 +371,38 @@ private void validateKeepAlives(TimeValue defaultKeepAlive, TimeValue maxKeepAli } } + /** + * Default keep alive search setting should be less than max PIT keep alive + */ + private void validatePitKeepAlives(TimeValue defaultKeepAlive, TimeValue maxPitKeepAlive) { + if (defaultKeepAlive.millis() > maxPitKeepAlive.millis()) { + throw new IllegalArgumentException( + "Default keep alive setting for request [" + + DEFAULT_KEEPALIVE_SETTING.getKey() + + "]" + + " should be smaller than max keep alive for PIT [" + + MAX_PIT_KEEPALIVE_SETTING.getKey() + + "], " + + "was (" + + defaultKeepAlive + + " > " + + maxPitKeepAlive + + ")" + ); + } + } + private void setKeepAlives(TimeValue defaultKeepAlive, TimeValue maxKeepAlive) { validateKeepAlives(defaultKeepAlive, maxKeepAlive); this.defaultKeepAlive = defaultKeepAlive.millis(); this.maxKeepAlive = maxKeepAlive.millis(); } + private void setPitKeepAlives(TimeValue 
defaultKeepAlive, TimeValue maxPitKeepAlive) { + validatePitKeepAlives(defaultKeepAlive, maxPitKeepAlive); + this.maxPitKeepAlive = maxPitKeepAlive.millis(); + } + private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) { this.defaultSearchTimeout = defaultSearchTimeout; } @@ -353,6 +419,10 @@ private void setMaxOpenScrollContext(int maxOpenScrollContext) { this.maxOpenScrollContext = maxOpenScrollContext; } + private void setMaxOpenPitContext(int maxOpenPitContext) { + this.maxOpenPitContext = maxOpenPitContext; + } + private void setLowLevelCancellation(Boolean lowLevelCancellation) { this.lowLevelCancellation = lowLevelCancellation; } @@ -793,8 +863,8 @@ final ReaderContext createAndPutReaderContext( * Opens the reader context for given shardId. The newly opened reader context will be keep * until the {@code keepAlive} elapsed unless it is manually released. */ - public void openReaderContext(ShardId shardId, TimeValue keepAlive, ActionListener listener) { - checkKeepAliveLimit(keepAlive.millis()); + public void createPitReaderContext(ShardId shardId, TimeValue keepAlive, ActionListener listener) { + checkPitKeepAliveLimit(keepAlive.millis()); final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); final IndexShard shard = indexService.getShard(shardId.id()); final SearchOperationListener searchOperationListener = shard.getSearchOperationListener(); @@ -802,13 +872,31 @@ public void openReaderContext(ShardId shardId, TimeValue keepAlive, ActionListen Engine.SearcherSupplier searcherSupplier = null; ReaderContext readerContext = null; try { + if (openPitContexts.incrementAndGet() > maxOpenPitContext) { + throw new OpenSearchRejectedExecutionException( + "Trying to create too many Point In Time contexts. Must be less than or equal to: [" + + maxOpenPitContext + + "]. " + + "This limit can be set by changing the [" + + MAX_OPEN_PIT_CONTEXT.getKey() + + "] setting." 
+ ); + } searcherSupplier = shard.acquireSearcherSupplier(); final ShardSearchContextId id = new ShardSearchContextId(sessionId, idGenerator.incrementAndGet()); - readerContext = new ReaderContext(id, indexService, shard, searcherSupplier, keepAlive.millis(), false); + readerContext = new PitReaderContext(id, indexService, shard, searcherSupplier, keepAlive.millis(), false); final ReaderContext finalReaderContext = readerContext; searcherSupplier = null; // transfer ownership to reader context + searchOperationListener.onNewReaderContext(readerContext); - readerContext.addOnClose(() -> searchOperationListener.onFreeReaderContext(finalReaderContext)); + searchOperationListener.onNewPitContext(finalReaderContext); + + readerContext.addOnClose(() -> { + openPitContexts.decrementAndGet(); + searchOperationListener.onFreeReaderContext(finalReaderContext); + searchOperationListener.onFreePitContext(finalReaderContext); + }); + // add the newly created pit reader context to active readers putReaderContext(readerContext); readerContext = null; listener.onResponse(finalReaderContext.id()); @@ -932,6 +1020,29 @@ public boolean freeReaderContext(ShardSearchContextId contextId) { return false; } + /** + * Update PIT reader with pit id, keep alive and created time etc + */ + public void updatePitIdAndKeepAlive(UpdatePitContextRequest request, ActionListener listener) { + checkPitKeepAliveLimit(request.getKeepAlive()); + PitReaderContext readerContext = getPitReaderContext(request.getSearchContextId()); + if (readerContext == null) { + throw new SearchContextMissingException(request.getSearchContextId()); + } + Releasable updatePit = null; + try { + updatePit = readerContext.updatePitIdAndKeepAlive(request.getKeepAlive(), request.getPitId(), request.getCreationTime()); + listener.onResponse(new UpdatePitContextResponse(request.getPitId(), request.getCreationTime(), request.getKeepAlive())); + } catch (Exception e) { + freeReaderContext(readerContext.id()); + listener.onFailure(e); + } finally { + if (updatePit != null) { + updatePit.close(); + } + } + } + public void freeAllScrollContexts() { for (ReaderContext readerContext : activeReaders.values()) { if (readerContext.scrollContext() != null) { @@ -944,7 +1055,11 @@ private long getKeepAlive(ShardSearchRequest request) { if (request.scroll() != null) { return getScrollKeepAlive(request.scroll()); } else if (request.keepAlive() != null) { - checkKeepAliveLimit(request.keepAlive().millis()); + if (getReaderContext(request.readerId()) instanceof PitReaderContext) { + checkPitKeepAliveLimit(request.keepAlive().millis()); + } else { + checkKeepAliveLimit(request.keepAlive().millis()); + } return request.keepAlive().getMillis(); } else { return request.readerId() == null ? defaultKeepAlive : -1; @@ -975,6 +1090,25 @@ private void checkKeepAliveLimit(long keepAlive) { } } + /** + * check if request keep alive is greater than max keep alive + */ + private void checkPitKeepAliveLimit(long keepAlive) { + if (keepAlive > maxPitKeepAlive) { + throw new IllegalArgumentException( + "Keep alive for request (" + + TimeValue.timeValueMillis(keepAlive) + + ") is too large. " + + "It must be less than (" + + TimeValue.timeValueMillis(maxPitKeepAlive) + + "). " + + "This limit can be set by changing the [" + + MAX_PIT_KEEPALIVE_SETTING.getKey() + + "] cluster level setting." 
+ ); + } + } + private ActionListener wrapFailureListener(ActionListener listener, ReaderContext context, Releasable releasable) { return new ActionListener() { @Override @@ -1165,8 +1299,8 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc } if (source.slice() != null) { - if (context.scrollContext() == null) { - throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context"); + if (context.scrollContext() == null && !(context.readerContext() instanceof PitReaderContext)) { + throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context or PIT context"); } context.sliceBuilder(source.slice()); } @@ -1261,6 +1395,14 @@ public ResponseCollectorService getResponseCollectorService() { return this.responseCollectorService; } + public PitReaderContext getPitReaderContext(ShardSearchContextId id) { + ReaderContext context = activeReaders.get(id.getId()); + if (context instanceof PitReaderContext) { + return (PitReaderContext) context; + } + return null; + } + class Reaper implements Runnable { @Override public void run() { diff --git a/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java b/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java new file mode 100644 index 0000000000000..43ca7e0ebd823 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/internal/PitReaderContext.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.internal; + +import org.apache.lucene.util.SetOnce; +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.lease.Releasables; +import org.opensearch.index.IndexService; +import org.opensearch.index.engine.Engine; +import org.opensearch.index.shard.IndexShard; + +/** + * PIT reader context containing PIT specific information such as pit id, create time etc. + */ +public class PitReaderContext extends ReaderContext { + + // Storing the encoded PIT ID as part of PIT reader context for use cases such as list pit API + private final SetOnce pitId = new SetOnce<>(); + // Creation time of PIT contexts which helps users to differentiate between multiple PIT reader contexts + private final SetOnce creationTime = new SetOnce<>(); + + public PitReaderContext( + ShardSearchContextId id, + IndexService indexService, + IndexShard indexShard, + Engine.SearcherSupplier searcherSupplier, + long keepAliveInMillis, + boolean singleSession + ) { + super(id, indexService, indexShard, searcherSupplier, keepAliveInMillis, singleSession); + } + + public String getPitId() { + return this.pitId.get(); + } + + public void setPitId(final String pitId) { + this.pitId.set(pitId); + } + + /** + * Returns a releasable to indicate that the caller has stopped using this reader. + * The pit id can be updated and time to live of the reader usage can be extended using the provided + * keepAliveInMillis. 
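+ * Callers are expected to close the returned releasable once the update is done, for example
+ * (sketch mirroring SearchService#updatePitIdAndKeepAlive):
+ * <pre>{@code
+ * try (Releasable ignored = readerContext.updatePitIdAndKeepAlive(keepAliveMillis, pitId, createTime)) {
+ *     // reader context stays referenced while the id, keep alive and creation time are applied
+ * }
+ * }</pre>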
+ */ + public Releasable updatePitIdAndKeepAlive(long keepAliveInMillis, String pitId, long createTime) { + getRefCounted().incRef(); + tryUpdateKeepAlive(keepAliveInMillis); + setPitId(pitId); + setCreationTime(createTime); + return Releasables.releaseOnce(() -> { + getLastAccessTime().updateAndGet(curr -> Math.max(curr, nowInMillis())); + getRefCounted().decRef(); + }); + } + + public long getCreationTime() { + return this.creationTime.get(); + } + + public void setCreationTime(final long creationTime) { + this.creationTime.set(creationTime); + } +} diff --git a/server/src/main/java/org/opensearch/search/internal/ReaderContext.java b/server/src/main/java/org/opensearch/search/internal/ReaderContext.java index 5bcc491f4ffdb..04791e05f603c 100644 --- a/server/src/main/java/org/opensearch/search/internal/ReaderContext.java +++ b/server/src/main/java/org/opensearch/search/internal/ReaderContext.java @@ -105,10 +105,18 @@ public void validate(TransportRequest request) { indexShard.getSearchOperationListener().validateReaderContext(this, request); } - private long nowInMillis() { + protected long nowInMillis() { return indexShard.getThreadPool().relativeTimeInMillis(); } + protected AbstractRefCounted getRefCounted() { + return refCounted; + } + + protected AtomicLong getLastAccessTime() { + return lastAccessTime; + } + @Override public final void close() { if (closed.compareAndSet(false, true)) { @@ -140,7 +148,10 @@ public Engine.Searcher acquireSearcher(String source) { return searcherSupplier.acquireSearcher(source); } - private void tryUpdateKeepAlive(long keepAlive) { + /** + * Update keep alive if it is greater than current keep alive + */ + public void tryUpdateKeepAlive(long keepAlive) { this.keepAlive.updateAndGet(curr -> Math.max(curr, keepAlive)); } diff --git a/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java new file mode 100644 index 0000000000000..f07bbe5975535 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/search/CreatePitControllerTests.java @@ -0,0 +1,592 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.opensearch.Version; +import org.opensearch.action.ActionListener; +import org.opensearch.action.LatchedActionListener; +import org.opensearch.action.StepListener; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.AtomicArray; +import org.opensearch.index.query.IdsQueryBuilder; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.shard.ShardId; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.SearchPhaseResult; +import org.opensearch.search.SearchShardTarget; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.internal.AliasFilter; +import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskId; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.transport.MockTransportService; +import org.opensearch.threadpool.TestThreadPool; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.RemoteClusterConnectionTests; +import org.opensearch.transport.Transport; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * Functional tests for various methods in create pit controller. Covers update pit phase specifically since + * integration tests don't cover it. 
+ */ +public class CreatePitControllerTests extends OpenSearchTestCase { + + DiscoveryNode node1 = null; + DiscoveryNode node2 = null; + DiscoveryNode node3 = null; + String pitId = null; + TransportSearchAction transportSearchAction = null; + Task task = null; + DiscoveryNodes nodes = null; + NamedWriteableRegistry namedWriteableRegistry = null; + SearchResponse searchResponse = null; + ActionListener createPitListener = null; + ClusterService clusterServiceMock = null; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } + + private MockTransportService startTransport(String id, List knownNodes, Version version) { + return startTransport(id, knownNodes, version, Settings.EMPTY); + } + + private MockTransportService startTransport( + final String id, + final List knownNodes, + final Version version, + final Settings settings + ) { + return RemoteClusterConnectionTests.startTransport(id, knownNodes, version, threadPool, settings); + } + + @Before + public void setupData() { + node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + setPitId(); + namedWriteableRegistry = new NamedWriteableRegistry( + Arrays.asList( + new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, MatchAllQueryBuilder.NAME, MatchAllQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, IdsQueryBuilder.NAME, IdsQueryBuilder::new) + ) + ); + nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); + transportSearchAction = mock(TransportSearchAction.class); + task = new Task( + randomLong(), + "transport", + SearchAction.NAME, + "description", + new TaskId(randomLong() + ":" + randomLong()), + Collections.emptyMap() + ); + InternalSearchResponse response = new InternalSearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), + InternalAggregations.EMPTY, + null, + null, + false, + null, + 1 + ); + searchResponse = new SearchResponse( + response, + null, + 3, + 3, + 0, + 100, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY, + pitId + ); + createPitListener = new ActionListener() { + @Override + public void onResponse(CreatePitResponse createPITResponse) { + assertEquals(3, createPITResponse.getTotalShards()); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + + clusterServiceMock = mock(ClusterService.class); + ClusterState state = mock(ClusterState.class); + + final Settings keepAliveSettings = Settings.builder().put(CreatePitController.PIT_INIT_KEEP_ALIVE.getKey(), 30000).build(); + when(clusterServiceMock.getSettings()).thenReturn(keepAliveSettings); + + when(state.getMetadata()).thenReturn(Metadata.EMPTY_METADATA); + when(state.metadata()).thenReturn(Metadata.EMPTY_METADATA); + when(clusterServiceMock.state()).thenReturn(state); + when(state.getNodes()).thenReturn(nodes); + } + + /** + * Test if transport call for update pit is made to all nodes present as part of PIT ID returned from phase one of create pit + */ + public void testUpdatePitAfterCreatePitSuccess() throws InterruptedException { + 
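+        // The stubbed SearchTransportService below records, in these two lists, the nodes that receive
+        // update pit and free context calls, so the assertions at the end can verify the update phase fan-out.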
List updateNodesInvoked = new CopyOnWriteArrayList<>(); + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) + ) { + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + @Override + public void updatePitContext( + Transport.Connection connection, + UpdatePitContextRequest request, + ActionListener listener + ) { + updateNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new UpdatePitContextResponse("pitid", 500000, 500000))); + t.start(); + } + + /** + * Test if cleanup request is called + */ + @Override + public void sendFreeContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + + CountDownLatch latch = new CountDownLatch(1); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + CreatePitController controller = new CreatePitController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePitResponse createPITResponse) { + assertEquals(3, createPITResponse.getTotalShards()); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }, latch); + + StepListener createListener = new StepListener<>(); + controller.executeCreatePit(createListener, updatelistener); + createListener.onResponse(searchResponse); + latch.await(); + assertEquals(3, updateNodesInvoked.size()); + assertEquals(0, deleteNodesInvoked.size()); + } + } + } + + /** + * If create phase results in failure, update pit phase should not proceed and propagate the exception + */ + public void testUpdatePitAfterCreatePitFailure() throws InterruptedException { + List updateNodesInvoked = new CopyOnWriteArrayList<>(); + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + 
+                    Settings.EMPTY,
+                    Version.CURRENT,
+                    threadPool,
+                    null
+                )
+            ) {
+                transportService.start();
+                transportService.acceptIncomingRequests();
+                SearchTransportService searchTransportService = new SearchTransportService(transportService, null) {
+                    @Override
+                    public void updatePitContext(
+                        Transport.Connection connection,
+                        UpdatePitContextRequest request,
+                        ActionListener<UpdatePitContextResponse> listener
+                    ) {
+                        updateNodesInvoked.add(connection.getNode());
+                        Thread t = new Thread(() -> listener.onResponse(new UpdatePitContextResponse("pitid", 500000, 500000)));
+                        t.start();
+                    }
+
+                    @Override
+                    public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
+                        return new SearchAsyncActionTests.MockConnection(node);
+                    }
+
+                    @Override
+                    public void sendFreeContext(
+                        Transport.Connection connection,
+                        ShardSearchContextId contextId,
+                        ActionListener<SearchFreeContextResponse> listener
+                    ) {
+                        deleteNodesInvoked.add(connection.getNode());
+                        Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true)));
+                        t.start();
+                    }
+                };
+
+                CountDownLatch latch = new CountDownLatch(1);
+
+                CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true);
+                request.setIndices(new String[] { "index" });
+
+                CreatePitController controller = new CreatePitController(
+                    request,
+                    searchTransportService,
+                    clusterServiceMock,
+                    transportSearchAction,
+                    namedWriteableRegistry,
+                    task,
+                    createPitListener
+                );
+
+                ActionListener<CreatePitResponse> updatelistener = new LatchedActionListener<>(new ActionListener<CreatePitResponse>() {
+                    @Override
+                    public void onResponse(CreatePitResponse createPITResponse) {
+                        throw new AssertionError("on response is called");
+                    }
+
+                    @Override
+                    public void onFailure(Exception e) {
+                        assertTrue(e.getCause().getMessage().contains("Exception occurred in phase 1"));
+                    }
+                }, latch);
+
+                StepListener<SearchResponse> createListener = new StepListener<>();
+
+                controller.executeCreatePit(createListener, updatelistener);
+                createListener.onFailure(new Exception("Exception occurred in phase 1"));
+                latch.await();
+                assertEquals(0, updateNodesInvoked.size());
+                /**
+                 * cleanup is not called on create pit phase one failure
+                 */
+                assertEquals(0, deleteNodesInvoked.size());
+            }
+        }
+    }
+
+    /**
+     * Testing that any update pit failure fails the request
+     */
+    public void testUpdatePitFailureForNodeDrop() throws InterruptedException {
+        List<DiscoveryNode> updateNodesInvoked = new CopyOnWriteArrayList<>();
+        List<DiscoveryNode> deleteNodesInvoked = new CopyOnWriteArrayList<>();
+        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
+        try (
+            MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT);
+            MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT)
+        ) {
+            knownNodes.add(cluster1Transport.getLocalDiscoNode());
+            knownNodes.add(cluster2Transport.getLocalDiscoNode());
+            Collections.shuffle(knownNodes, random());
+
+            try (
+                MockTransportService transportService = MockTransportService.createNewService(
+                    Settings.EMPTY,
+                    Version.CURRENT,
+                    threadPool,
+                    null
+                )
+            ) {
+                transportService.start();
+                transportService.acceptIncomingRequests();
+
+                SearchTransportService searchTransportService = new SearchTransportService(transportService, null) {
+                    @Override
+                    public void updatePitContext(
+                        Transport.Connection connection,
+                        UpdatePitContextRequest request,
+                        ActionListener<UpdatePitContextResponse> listener
+                    ) {
+                        updateNodesInvoked.add(connection.getNode());
+                        if (connection.getNode().getId().equals("node_3")) {
+                            Thread t = new Thread(() -> listener.onFailure(new Exception("node 3 down")));
+                            t.start();
+ } else { + Thread t = new Thread(() -> listener.onResponse(new UpdatePitContextResponse("pitid", 500000, 500000))); + t.start(); + } + } + + @Override + public void sendFreeContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + CreatePitController controller = new CreatePitController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + CountDownLatch latch = new CountDownLatch(1); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePitResponse createPITResponse) { + throw new AssertionError("response is called"); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e.getMessage().contains("node 3 down")); + } + }, latch); + + StepListener createListener = new StepListener<>(); + controller.executeCreatePit(createListener, updatelistener); + createListener.onResponse(searchResponse); + latch.await(); + assertEquals(3, updateNodesInvoked.size()); + /** + * check if cleanup is called for all nodes in case of update pit failure + */ + assertEquals(3, deleteNodesInvoked.size()); + } + } + } + + public void testUpdatePitFailureWhereAllNodesDown() throws InterruptedException { + List updateNodesInvoked = new CopyOnWriteArrayList<>(); + List deleteNodesInvoked = new CopyOnWriteArrayList<>(); + List knownNodes = new CopyOnWriteArrayList<>(); + try ( + MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT); + MockTransportService cluster2Transport = startTransport("cluster_2_node", knownNodes, Version.CURRENT) + ) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + knownNodes.add(cluster2Transport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try ( + MockTransportService transportService = MockTransportService.createNewService( + Settings.EMPTY, + Version.CURRENT, + threadPool, + null + ) + ) { + transportService.start(); + transportService.acceptIncomingRequests(); + SearchTransportService searchTransportService = new SearchTransportService(transportService, null) { + @Override + public void updatePitContext( + Transport.Connection connection, + UpdatePitContextRequest request, + ActionListener listener + ) { + updateNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onFailure(new Exception("node down"))); + t.start(); + } + + @Override + public void sendFreeContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { + deleteNodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(true))); + t.start(); + } + + @Override + public Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] 
{ "index" }); + CreatePitController controller = new CreatePitController( + request, + searchTransportService, + clusterServiceMock, + transportSearchAction, + namedWriteableRegistry, + task, + createPitListener + ); + + CountDownLatch latch = new CountDownLatch(1); + + ActionListener updatelistener = new LatchedActionListener<>(new ActionListener() { + @Override + public void onResponse(CreatePitResponse createPITResponse) { + throw new AssertionError("response is called"); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e.getMessage().contains("node down")); + } + }, latch); + + StepListener createListener = new StepListener<>(); + controller.executeCreatePit(createListener, updatelistener); + createListener.onResponse(searchResponse); + latch.await(); + assertEquals(3, updateNodesInvoked.size()); + /** + * check if cleanup is called for all nodes in case of update pit failure + */ + assertEquals(3, deleteNodesInvoked.size()); + } + } + + } + + QueryBuilder randomQueryBuilder() { + if (randomBoolean()) { + return new TermQueryBuilder(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } else if (randomBoolean()) { + return new MatchAllQueryBuilder(); + } else { + return new IdsQueryBuilder().addIds(randomAlphaOfLength(10)); + } + } + + private void setPitId() { + AtomicArray array = new AtomicArray<>(3); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("a", 1), + node1 + ); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("b", 12), + node2 + ); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult( + new ShardSearchContextId("c", 42), + node3 + ); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + array.setOnce(0, testSearchPhaseResult1); + array.setOnce(1, testSearchPhaseResult2); + array.setOnce(2, testSearchPhaseResult3); + + final Version version = Version.CURRENT; + final Map aliasFilters = new HashMap<>(); + for (SearchPhaseResult result : array.asList()) { + final AliasFilter aliasFilter; + if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder()); + } else if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder(), "alias-" + between(1, 10)); + } else { + aliasFilter = AliasFilter.EMPTY; + } + if (randomBoolean()) { + aliasFilters.put(result.getSearchShardTarget().getShardId().getIndex().getUUID(), aliasFilter); + } + } + pitId = SearchContextId.encode(array.asList(), aliasFilters, version); + } + +} diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java index f6ca12f1c514c..3c83f899dd1b5 100644 --- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java @@ -67,6 +67,7 @@ import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.internal.AliasFilter; import 
org.opensearch.search.internal.LegacyReaderContext; +import org.opensearch.search.internal.PitReaderContext; import org.opensearch.search.internal.ReaderContext; import org.opensearch.search.internal.ShardSearchContextId; import org.opensearch.search.internal.ShardSearchRequest; @@ -134,10 +135,12 @@ public void testPreProcess() throws Exception { int maxResultWindow = randomIntBetween(50, 100); int maxRescoreWindow = randomIntBetween(50, 100); int maxSlicesPerScroll = randomIntBetween(50, 100); + int maxSlicesPerPit = randomIntBetween(50, 100); Settings settings = Settings.builder() .put("index.max_result_window", maxResultWindow) .put("index.max_slices_per_scroll", maxSlicesPerScroll) .put("index.max_rescore_window", maxRescoreWindow) + .put("index.max_slices_per_pit", maxSlicesPerPit) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) @@ -300,13 +303,13 @@ protected Engine.Searcher acquireSearcherInternal(String source) { ); readerContext.close(); - readerContext = new ReaderContext( + readerContext = new LegacyReaderContext( newContextId(), indexService, indexShard, searcherSupplier.get(), - randomNonNegativeLong(), - false + shardSearchRequest, + randomNonNegativeLong() ); // rescore is null but sliceBuilder is not null DefaultSearchContext context2 = new DefaultSearchContext( @@ -400,6 +403,48 @@ protected Engine.Searcher acquireSearcherInternal(String source) { assertTrue(query1 instanceof MatchNoDocsQuery || query2 instanceof MatchNoDocsQuery); readerContext.close(); + + ReaderContext pitReaderContext = new PitReaderContext( + newContextId(), + indexService, + indexShard, + searcherSupplier.get(), + 1000, + true + ); + DefaultSearchContext context5 = new DefaultSearchContext( + pitReaderContext, + shardSearchRequest, + target, + null, + bigArrays, + null, + timeout, + null, + false, + Version.CURRENT, + false, + executor + ); + int numSlicesForPit = maxSlicesPerPit + randomIntBetween(1, 100); + when(sliceBuilder.getMax()).thenReturn(numSlicesForPit); + context5.sliceBuilder(sliceBuilder); + + exception = expectThrows(IllegalArgumentException.class, () -> context5.preProcess(false)); + assertThat( + exception.getMessage(), + equalTo( + "The number of slices [" + + numSlicesForPit + + "] is too large. It must " + + "be less than [" + + maxSlicesPerPit + + "]. This limit can be set by changing the [" + + IndexSettings.MAX_SLICES_PER_PIT.getKey() + + "] index level setting." + ) + ); + pitReaderContext.close(); threadPool.shutdown(); } } diff --git a/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java new file mode 100644 index 0000000000000..b2cdd156576d8 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/PitMultiNodeTests.java @@ -0,0 +1,211 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.action.ActionFuture; +import org.opensearch.action.search.CreatePitAction; +import org.opensearch.action.search.CreatePitRequest; +import org.opensearch.action.search.CreatePitResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.search.builder.PointInTimeBuilder; +import org.opensearch.test.InternalTestCluster; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.containsString; +import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +/** + * Multi node integration tests for PIT creation and search operation with PIT ID. + */ +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) +public class PitMultiNodeTests extends OpenSearchIntegTestCase { + + @Before + public void setupIndex() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).execute().get(); + ensureGreen(); + } + + @After + public void clearIndex() { + client().admin().indices().prepareDelete("index").get(); + } + + public void testPit() throws Exception { + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + SearchResponse searchResponse = client().prepareSearch("index") + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + assertEquals(2, searchResponse.getSuccessfulShards()); + assertEquals(2, searchResponse.getTotalShards()); + } + + public void testCreatePitWhileNodeDropWithAllowPartialCreationFalse() throws Exception { + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), false); + request.setIndices(new String[] { "index" }); + internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + ExecutionException ex = expectThrows(ExecutionException.class, execute::get); + assertTrue(ex.getMessage().contains("Failed to execute phase [create_pit]")); + assertTrue(ex.getMessage().contains("Partial shards failure")); + return super.onNodeStopped(nodeName); + } + }); + } + + public void testCreatePitWhileNodeDropWithAllowPartialCreationTrue() throws Exception { + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + assertEquals(1, pitResponse.getSuccessfulShards()); + assertEquals(2, 
pitResponse.getTotalShards()); + SearchResponse searchResponse = client().prepareSearch("index") + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + assertEquals(1, searchResponse.getSuccessfulShards()); + assertEquals(1, searchResponse.getTotalShards()); + return super.onNodeStopped(nodeName); + } + }); + } + + public void testPitSearchWithNodeDrop() throws Exception { + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + SearchResponse searchResponse = client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + assertEquals(1, searchResponse.getSuccessfulShards()); + assertEquals(1, searchResponse.getFailedShards()); + assertEquals(0, searchResponse.getSkippedShards()); + assertEquals(2, searchResponse.getTotalShards()); + return super.onNodeStopped(nodeName); + } + }); + } + + public void testPitSearchWithNodeDropWithPartialSearchResultsFalse() throws Exception { + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + ActionFuture execute = client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .setAllowPartialSearchResults(false) + .execute(); + ExecutionException ex = expectThrows(ExecutionException.class, execute::get); + assertTrue(ex.getMessage().contains("Partial shards failure")); + return super.onNodeStopped(nodeName); + } + }); + } + + public void testPitInvalidDefaultKeepAlive() { + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("pit.max_keep_alive", "1m").put("search.default_keep_alive", "2m")) + .get() + ); + assertThat(exc.getMessage(), containsString("was (2m > 1m)")); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "5m").put("pit.max_keep_alive", "5m")) + .get() + ); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "2m")) + .get() + ); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("pit.max_keep_alive", "2m")) + .get() + ); + + exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "3m")) + .get() + ); + assertThat(exc.getMessage(), containsString("was (3m > 2m)")); + + assertAcked( + client().admin() + .cluster() + 
.prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("search.default_keep_alive", "1m")) + .get() + ); + + exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put("pit.max_keep_alive", "30s")) + .get() + ); + assertThat(exc.getMessage(), containsString("was (1m > 30s)")); + + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull("*")) + .setTransientSettings(Settings.builder().putNull("*")) + ); + + } + +} diff --git a/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java b/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java new file mode 100644 index 0000000000000..2275a07e0d807 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/PitSingleNodeTests.java @@ -0,0 +1,575 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search; + +import org.hamcrest.Matchers; +import org.opensearch.action.ActionFuture; +import org.opensearch.action.search.*; +import org.opensearch.common.Priority; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.search.builder.PointInTimeBuilder; +import org.opensearch.search.sort.SortOrder; +import org.opensearch.test.OpenSearchSingleNodeTestCase; + +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.query.QueryBuilders.matchAllQuery; +import static org.opensearch.index.query.QueryBuilders.queryStringQuery; +import static org.opensearch.index.query.QueryBuilders.termQuery; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; + +/** + * Single node integration tests for various PIT use cases such as create pit, search etc + */ +public class PitSingleNodeTests extends OpenSearchSingleNodeTestCase { + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + @Override + protected Settings nodeSettings() { + // very frequent checks + return Settings.builder() + .put(super.nodeSettings()) + .put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(1)) + .put(CreatePitController.PIT_INIT_KEEP_ALIVE.getKey(), TimeValue.timeValueSeconds(1)) + .build(); + } + + public void testCreatePITSuccess() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + SearchResponse searchResponse = 
client().prepareSearch("index") + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + assertHitCount(searchResponse, 1); + + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(2, service.getActiveContexts()); + service.doClose(); // this kills the keep-alive reaper we have to reset the node after this test + } + + public void testCreatePITWithMultipleIndicesSuccess() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + createIndex("index1", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index1").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index", "index1" }); + SearchService service = getInstanceFromNode(SearchService.class); + + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse response = execute.get(); + assertEquals(4, response.getSuccessfulShards()); + assertEquals(4, service.getActiveContexts()); + service.doClose(); + } + + public void testCreatePITWithShardReplicasSuccess() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 1).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + + client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + SearchResponse searchResponse = client().prepareSearch("index") + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + assertHitCount(searchResponse, 1); + + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(2, service.getActiveContexts()); + service.doClose(); + } + + public void testCreatePITWithNonExistentIndex() { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index", "index1" }); + SearchService service = getInstanceFromNode(SearchService.class); + + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + + ExecutionException ex = expectThrows(ExecutionException.class, execute::get); + + assertTrue(ex.getMessage().contains("no such index [index1]")); + assertEquals(0, service.getActiveContexts()); + service.doClose(); + } + + public void testCreatePITOnCloseIndex() { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", 
"value").setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + client().admin().indices().prepareClose("index").get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + + ExecutionException ex = expectThrows(ExecutionException.class, execute::get); + + assertTrue(ex.getMessage().contains("IndexClosedException")); + + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(0, service.getActiveContexts()); + service.doClose(); + } + + public void testPitSearchOnDeletedIndex() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + client().admin().indices().prepareDelete("index").get(); + + IndexNotFoundException ex = expectThrows(IndexNotFoundException.class, () -> { + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + }); + assertTrue(ex.getMessage().contains("no such index [index]")); + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(0, service.getActiveContexts()); + service.doClose(); + } + + public void testInvalidPitId() { + createIndex("idx"); + String id = "c2Nhbjs2OzM0NDg1ODpzRlBLc0FXNlNyNm5JWUc1"; + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(id).setKeepAlive(TimeValue.timeValueDays(1))) + .get() + ); + assertEquals("invalid id: [" + id + "]", e.getMessage()); + } + + public void testPitSearchOnCloseIndex() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(2, service.getActiveContexts()); + client().admin().indices().prepareClose("index").get(); + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, () -> { + SearchResponse searchResponse = client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + }); + assertTrue(ex.shardFailures()[0].reason().contains("SearchContextMissingException")); + assertEquals(0, service.getActiveContexts()); + + // PIT reader contexts are lost after close, verifying it with open index api + client().admin().indices().prepareOpen("index").get(); + ex = 
expectThrows(SearchPhaseExecutionException.class, () -> { + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + }); + assertTrue(ex.shardFailures()[0].reason().contains("SearchContextMissingException")); + assertEquals(0, service.getActiveContexts()); + service.doClose(); + } + + public void testSearchWithFirstPhaseKeepAliveExpiry() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueMillis(100), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(2, service.getActiveContexts()); + // since first phase temporary keep alive is set at 1 second in this test file + // and create pit request keep alive is less than that, keep alive is set to 1 second, (max of 2 keep alives) + // so reader context will clear up after 1 second + Thread.sleep(1000); + client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, () -> { + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + }); + assertTrue(ex.shardFailures()[0].reason().contains("SearchContextMissingException")); + assertEquals(0, service.getActiveContexts()); + service.doClose(); + } + + public void testSearchWithPitSecondPhaseKeepAliveExpiry() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueSeconds(2), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(2, service.getActiveContexts()); + Thread.sleep(1000); + assertEquals(2, service.getActiveContexts()); + Thread.sleep(1500); + assertEquals(0, service.getActiveContexts()); + client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, () -> { + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1))) + .get(); + }); + assertTrue(ex.shardFailures()[0].reason().contains("SearchContextMissingException")); + service.doClose(); + } + + public void testSearchWithPitKeepAliveExtension() throws ExecutionException, InterruptedException { + createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); 
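+        // The PIT is created with a 1 second keep alive and then extended to 3 seconds through the search
+        // request below, so the reader contexts survive past the original expiry and are reaped only after
+        // the extended keep alive elapses.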
+ CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueSeconds(1), true); + request.setIndices(new String[] { "index" }); + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + CreatePitResponse pitResponse = execute.get(); + SearchService service = getInstanceFromNode(SearchService.class); + assertEquals(2, service.getActiveContexts()); + client().prepareSearch() + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueSeconds(3))) + .get(); + client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + Thread.sleep(2500); + assertEquals(2, service.getActiveContexts()); + Thread.sleep(1000); + assertEquals(0, service.getActiveContexts()); + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, () -> { + client().prepareSearch("index") + .setSize(2) + .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueMinutes(1))) + .get(); + }); + assertTrue(ex.shardFailures()[0].reason().contains("SearchContextMissingException")); + service.doClose(); + } + + public void testMaxOpenPitContexts() throws Exception { + createIndex("index"); + client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); + + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + SearchService service = getInstanceFromNode(SearchService.class); + + for (int i = 0; i < SearchService.MAX_OPEN_PIT_CONTEXT.get(Settings.EMPTY); i++) { + client().execute(CreatePitAction.INSTANCE, request).get(); + } + ActionFuture execute = client().execute(CreatePitAction.INSTANCE, request); + ExecutionException ex = expectThrows(ExecutionException.class, execute::get); + + assertTrue( + ex.getMessage() + .contains( + "Trying to create too many Point In Time contexts. " + + "Must be less than or equal to: [" + + SearchService.MAX_OPEN_PIT_CONTEXT.get(Settings.EMPTY) + + "]. " + + "This limit can be set by changing the [search.max_open_pit_context] setting." + ) + ); + service.doClose(); + } + + public void testOpenPitContextsConcurrently() throws Exception { + createIndex("index"); + final int maxPitContexts = SearchService.MAX_OPEN_PIT_CONTEXT.get(Settings.EMPTY); + CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true); + request.setIndices(new String[] { "index" }); + SearchService service = getInstanceFromNode(SearchService.class); + Thread[] threads = new Thread[randomIntBetween(2, 8)]; + CountDownLatch latch = new CountDownLatch(threads.length); + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + latch.countDown(); + try { + latch.await(); + for (;;) { + try { + client().execute(CreatePitAction.INSTANCE, request).get(); + } catch (ExecutionException e) { + assertTrue( + e.getMessage() + .contains( + "Trying to create too many Point In Time contexts. " + + "Must be less than or equal to: [" + + SearchService.MAX_OPEN_PIT_CONTEXT.get(Settings.EMPTY) + + "]. " + + "This limit can be set by changing the [" + + SearchService.MAX_OPEN_PIT_CONTEXT.getKey() + + "] setting." 
+                                    )
+                            );
+                            return;
+                        }
+                    }
+                } catch (Exception e) {
+                    throw new AssertionError(e);
+                }
+            });
+            threads[i].setName("opensearch[node_s_0][search]");
+            threads[i].start();
+        }
+        for (Thread thread : threads) {
+            thread.join();
+        }
+        assertThat(service.getActiveContexts(), equalTo(maxPitContexts));
+        service.doClose();
+    }
+
+    /**
+     * Point in time search should return the same results as creation time and index updates should not affect the PIT search results
+     */
+    public void testPitAfterUpdateIndex() throws Exception {
+        client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 5)).get();
+        client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get();
+
+        for (int i = 0; i < 50; i++) {
+            client().prepareIndex("test")
+                .setId(Integer.toString(i))
+                .setSource(
+                    jsonBuilder().startObject()
+                        .field("user", "foobar")
+                        .field("postDate", System.currentTimeMillis())
+                        .field("message", "test")
+                        .endObject()
+                )
+                .get();
+        }
+        client().admin().indices().prepareRefresh().get();
+
+        // create pit
+        CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueMinutes(2), true);
+        request.setIndices(new String[] { "test" });
+        ActionFuture<CreatePitResponse> execute = client().execute(CreatePitAction.INSTANCE, request);
+        CreatePitResponse pitResponse = execute.get();
+        SearchService service = getInstanceFromNode(SearchService.class);
+
+        assertThat(
+            client().prepareSearch()
+                .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                .setSize(0)
+                .setQuery(matchAllQuery())
+                .get()
+                .getHits()
+                .getTotalHits().value,
+            Matchers.equalTo(50L)
+        );
+
+        assertThat(
+            client().prepareSearch()
+                .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                .setSize(0)
+                .setQuery(termQuery("message", "test"))
+                .get()
+                .getHits()
+                .getTotalHits().value,
+            Matchers.equalTo(50L)
+        );
+        assertThat(
+            client().prepareSearch()
+                .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                .setSize(0)
+                .setQuery(termQuery("message", "test"))
+                .get()
+                .getHits()
+                .getTotalHits().value,
+            Matchers.equalTo(50L)
+        );
+        assertThat(
+            client().prepareSearch()
+                .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                .setSize(0)
+                .setQuery(termQuery("message", "update"))
+                .get()
+                .getHits()
+                .getTotalHits().value,
+            Matchers.equalTo(0L)
+        );
+        assertThat(
+            client().prepareSearch()
+                .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                .setSize(0)
+                .setQuery(termQuery("message", "update"))
+                .get()
+                .getHits()
+                .getTotalHits().value,
+            Matchers.equalTo(0L)
+        );
+
+        // update index
+        SearchResponse searchResponse = client().prepareSearch()
+            .setQuery(queryStringQuery("user:foobar"))
+            .setSize(50)
+            .addSort("postDate", SortOrder.ASC)
+            .get();
+        try {
+            do {
+                for (SearchHit searchHit : searchResponse.getHits().getHits()) {
+                    Map<String, Object> map = searchHit.getSourceAsMap();
+                    map.put("message", "update");
+                    client().prepareIndex("test").setId(searchHit.getId()).setSource(map).get();
+                }
+                searchResponse = client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get();
+
+            } while (searchResponse.getHits().getHits().length > 0);
+
+            client().admin().indices().prepareRefresh().get();
+            assertThat(
+                client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value,
+                Matchers.equalTo(50L)
+            );
+            /**
+             * assert without point in time
+             */
+
+            assertThat(
+                client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value,
+                Matchers.equalTo(0L)
+            );
+            assertThat(
+                client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value,
+                Matchers.equalTo(0L)
+            );
+            assertThat(
+                client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value,
+                Matchers.equalTo(50L)
+            );
+            assertThat(
+                client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value,
+                Matchers.equalTo(50L)
+            );
+            /**
+             * using point in time id will have the same search results as ones before update
+             */
+            assertThat(
+                client().prepareSearch()
+                    .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                    .setSize(0)
+                    .setQuery(termQuery("message", "test"))
+                    .get()
+                    .getHits()
+                    .getTotalHits().value,
+                Matchers.equalTo(50L)
+            );
+            assertThat(
+                client().prepareSearch()
+                    .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                    .setSize(0)
+                    .setQuery(termQuery("message", "test"))
+                    .get()
+                    .getHits()
+                    .getTotalHits().value,
+                Matchers.equalTo(50L)
+            );
+            assertThat(
+                client().prepareSearch()
+                    .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                    .setSize(0)
+                    .setQuery(termQuery("message", "update"))
+                    .get()
+                    .getHits()
+                    .getTotalHits().value,
+                Matchers.equalTo(0L)
+            );
+            assertThat(
+                client().prepareSearch()
+                    .setPointInTime(new PointInTimeBuilder(pitResponse.getId()))
+                    .setSize(0)
+                    .setQuery(termQuery("message", "update"))
+                    .get()
+                    .getHits()
+                    .getTotalHits().value,
+                Matchers.equalTo(0L)
+            );
+        } finally {
+            service.doClose();
+            assertEquals(0, service.getActiveContexts());
+        }
+    }
+
+    public void testConcurrentSearches() throws Exception {
+        createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build());
+        client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
+
+        CreatePitRequest request = new CreatePitRequest(TimeValue.timeValueDays(1), true);
+        request.setIndices(new String[] { "index" });
+        ActionFuture<CreatePitResponse> execute = client().execute(CreatePitAction.INSTANCE, request);
+        CreatePitResponse pitResponse = execute.get();
+        Thread[] threads = new Thread[5];
+        CountDownLatch latch = new CountDownLatch(threads.length);
+
+        for (int i = 0; i < threads.length; i++) {
+            threads[i] = new Thread(() -> {
+                latch.countDown();
+                try {
+                    latch.await();
+                    for (int j = 0; j < 50; j++) {
+                        client().prepareSearch()
+                            .setSize(2)
+                            .setPointInTime(new PointInTimeBuilder(pitResponse.getId()).setKeepAlive(TimeValue.timeValueDays(1)))
+                            .execute()
+                            .get();
+                    }
+                } catch (Exception e) {
+                    throw new AssertionError(e);
+                }
+            });
+            threads[i].setName("opensearch[node_s_0][search]");
+            threads[i].start();
+        }
+        for (Thread thread : threads) {
+            thread.join();
+        }
+
+        SearchService service = getInstanceFromNode(SearchService.class);
+        assertEquals(2, service.getActiveContexts());
+        service.doClose();
+        assertEquals(0, service.getActiveContexts());
+    }
+}
diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java
index 4e342875e4599..aca537ab07a29 100644
--- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java
+++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java
@@ -46,6 +46,8 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.search.SearchShardTask;
 import org.opensearch.action.search.SearchType;
+import org.opensearch.action.search.UpdatePitContextRequest;
+import org.opensearch.action.search.UpdatePitContextResponse;
 import org.opensearch.action.support.IndicesOptions;
 import org.opensearch.action.support.PlainActionFuture;
 import org.opensearch.action.support.WriteRequest;
@@ -1406,12 +1408,108 @@ public void testOpenReaderContext() {
         createIndex("index");
         SearchService searchService = getInstanceFromNode(SearchService.class);
         PlainActionFuture<ShardSearchContextId> future = new PlainActionFuture<>();
-        searchService.openReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future);
+        searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future);
         future.actionGet();
         assertThat(searchService.getActiveContexts(), equalTo(1));
         assertTrue(searchService.freeReaderContext(future.actionGet()));
     }
 
+    public void testPitContextMaxKeepAlive() {
+        createIndex("index");
+        SearchService searchService = getInstanceFromNode(SearchService.class);
+        PlainActionFuture<ShardSearchContextId> future = new PlainActionFuture<>();
+
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> {
+            searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueHours(25), future);
+            future.actionGet();
+        });
+        assertEquals(
+            "Keep alive for request (1d) is too large. "
+                + "It must be less than ("
+                + SearchService.MAX_PIT_KEEPALIVE_SETTING.get(Settings.EMPTY)
+                + "). "
+                + "This limit can be set by changing the ["
+                + SearchService.MAX_PIT_KEEPALIVE_SETTING.getKey()
+                + "] cluster level setting.",
+            ex.getMessage()
+        );
+        assertThat(searchService.getActiveContexts(), equalTo(0));
+    }
+
+    public void testUpdatePitId() {
+        createIndex("index");
+        SearchService searchService = getInstanceFromNode(SearchService.class);
+        PlainActionFuture<ShardSearchContextId> future = new PlainActionFuture<>();
+        searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future);
+        ShardSearchContextId id = future.actionGet();
+        PlainActionFuture<UpdatePitContextResponse> updateFuture = new PlainActionFuture<>();
+        UpdatePitContextRequest updateRequest = new UpdatePitContextRequest(
+            id,
+            "pitId",
+            TimeValue.timeValueMinutes(between(1, 10)).millis(),
+            System.currentTimeMillis()
+        );
+        searchService.updatePitIdAndKeepAlive(updateRequest, updateFuture);
+        UpdatePitContextResponse updateResponse = updateFuture.actionGet();
+        assertTrue(updateResponse.getPitId().equalsIgnoreCase("pitId"));
+        assertTrue(updateResponse.getCreationTime() == updateRequest.getCreationTime());
+        assertTrue(updateResponse.getKeepAlive() == updateRequest.getKeepAlive());
+        assertTrue(updateResponse.getPitId().equalsIgnoreCase("pitId"));
+        assertThat(searchService.getActiveContexts(), equalTo(1));
+        assertTrue(searchService.freeReaderContext(future.actionGet()));
+    }
+
+    public void testUpdatePitIdMaxKeepAlive() {
+        createIndex("index");
+        SearchService searchService = getInstanceFromNode(SearchService.class);
+        PlainActionFuture<ShardSearchContextId> future = new PlainActionFuture<>();
+        searchService.createPitReaderContext(new ShardId(resolveIndex("index"), 0), TimeValue.timeValueMinutes(between(1, 10)), future);
+        ShardSearchContextId id = future.actionGet();
+
+        UpdatePitContextRequest updateRequest = new UpdatePitContextRequest(
+            id,
+            "pitId",
+            TimeValue.timeValueHours(25).millis(),
+            System.currentTimeMillis()
+        );
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> {
+            PlainActionFuture<UpdatePitContextResponse> updateFuture = new PlainActionFuture<>();
+            searchService.updatePitIdAndKeepAlive(updateRequest, updateFuture);
+        });
+
+        assertEquals(
+            "Keep alive for request (1d) is too large. "
+                + "It must be less than ("
+                + SearchService.MAX_PIT_KEEPALIVE_SETTING.get(Settings.EMPTY)
+                + "). "
+                + "This limit can be set by changing the ["
+                + SearchService.MAX_PIT_KEEPALIVE_SETTING.getKey()
+                + "] cluster level setting.",
+            ex.getMessage()
+        );
+        assertThat(searchService.getActiveContexts(), equalTo(1));
+        assertTrue(searchService.freeReaderContext(future.actionGet()));
+    }
+
+    public void testUpdatePitIdWithInvalidReaderId() {
+        SearchService searchService = getInstanceFromNode(SearchService.class);
+        ShardSearchContextId id = new ShardSearchContextId("session", 9);
+
+        UpdatePitContextRequest updateRequest = new UpdatePitContextRequest(
+            id,
+            "pitId",
+            TimeValue.timeValueHours(23).millis(),
+            System.currentTimeMillis()
+        );
+        SearchContextMissingException ex = expectThrows(SearchContextMissingException.class, () -> {
+            PlainActionFuture<UpdatePitContextResponse> updateFuture = new PlainActionFuture<>();
+            searchService.updatePitIdAndKeepAlive(updateRequest, updateFuture);
+        });
+
+        assertEquals("No search context found for id [" + id.getId() + "]", ex.getMessage());
+        assertThat(searchService.getActiveContexts(), equalTo(0));
+    }
+
     private ReaderContext createReaderContext(IndexService indexService, IndexShard indexShard) {
         return new ReaderContext(
             new ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong()),
diff --git a/server/src/test/java/org/opensearch/search/pit/RestCreatePitActionTests.java b/server/src/test/java/org/opensearch/search/pit/RestCreatePitActionTests.java
new file mode 100644
index 0000000000000..5ca384daedbff
--- /dev/null
+++ b/server/src/test/java/org/opensearch/search/pit/RestCreatePitActionTests.java
@@ -0,0 +1,78 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pit;
+
+import org.apache.lucene.util.SetOnce;
+import org.opensearch.action.ActionListener;
+import org.opensearch.action.search.CreatePitRequest;
+import org.opensearch.action.search.CreatePitResponse;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.rest.RestRequest;
+import org.opensearch.rest.action.search.RestCreatePitAction;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.test.client.NoOpNodeClient;
+import org.opensearch.test.rest.FakeRestChannel;
+import org.opensearch.test.rest.FakeRestRequest;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests to verify behavior of create pit rest action
+ */
+public class RestCreatePitActionTests extends OpenSearchTestCase {
+    public void testRestCreatePit() throws Exception {
+        SetOnce<Boolean> createPitCalled = new SetOnce<>();
+        RestCreatePitAction action = new RestCreatePitAction();
+        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) {
+            @Override
+            public void createPit(CreatePitRequest request, ActionListener<CreatePitResponse> listener) {
+                createPitCalled.set(true);
+                assertThat(request.getKeepAlive().getStringRep(), equalTo("1m"));
+                assertFalse(request.shouldAllowPartialPitCreation());
+            }
+        }) {
+            Map<String, String> params = new HashMap<>();
+            params.put("keep_alive", "1m");
+            params.put("allow_partial_pit_creation", "false");
+            RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params)
+                .withMethod(RestRequest.Method.POST)
+                .build();
+            FakeRestChannel channel = new FakeRestChannel(request, false, 0);
+            action.handleRequest(request, channel, nodeClient);
+
+            assertThat(createPitCalled.get(), equalTo(true));
+        }
+    }
+
+    public void testRestCreatePitDefaultPartialCreation() throws Exception {
+        SetOnce<Boolean> createPitCalled = new SetOnce<>();
+        RestCreatePitAction action = new RestCreatePitAction();
+        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) {
+            @Override
+            public void createPit(CreatePitRequest request, ActionListener<CreatePitResponse> listener) {
+                createPitCalled.set(true);
+                assertThat(request.getKeepAlive().getStringRep(), equalTo("1m"));
+                assertTrue(request.shouldAllowPartialPitCreation());
+            }
+        }) {
+            Map<String, String> params = new HashMap<>();
+            params.put("keep_alive", "1m");
+            RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params)
+                .withMethod(RestRequest.Method.POST)
+                .build();
+            FakeRestChannel channel = new FakeRestChannel(request, false, 0);
+            action.handleRequest(request, channel, nodeClient);
+
+            assertThat(createPitCalled.get(), equalTo(true));
+        }
+    }
+}