/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search;

import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchLocalRequest;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;

import static java.util.Collections.singletonList;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

public class SearchServiceTests extends ESSingleNodeTestCase {

    @Override
    protected boolean resetNodeAfterTest() {
        return true;
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(FailOnRewriteQueryPlugin.class);
    }

    @Override
    protected Settings nodeSettings() {
        return Settings.builder().put("search.default_search_timeout", "5s").build();
    }

    public void testClearOnClose() throws ExecutionException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
        SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
        assertThat(searchResponse.getScrollId(), is(notNullValue()));
        SearchService service = getInstanceFromNode(SearchService.class);
        assertEquals(1, service.getActiveContexts());
        service.doClose(); // this kills the keep-alive reaper; we have to reset the node after this test
        assertEquals(0, service.getActiveContexts());
    }

    public void testClearOnStop() throws ExecutionException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
        SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
        assertThat(searchResponse.getScrollId(), is(notNullValue()));
        SearchService service = getInstanceFromNode(SearchService.class);
        assertEquals(1, service.getActiveContexts());
        service.doStop();
        assertEquals(0, service.getActiveContexts());
    }

    public void testClearIndexDelete() throws ExecutionException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
        SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
        assertThat(searchResponse.getScrollId(), is(notNullValue()));
        SearchService service = getInstanceFromNode(SearchService.class);
        assertEquals(1, service.getActiveContexts());
        assertAcked(client().admin().indices().prepareDelete("index"));
        assertEquals(0, service.getActiveContexts());
    }

    public void testCloseSearchContextOnRewriteException() {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();

        SearchService service = getInstanceFromNode(SearchService.class);
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
        IndexShard indexShard = indexService.getShard(0);

        final int activeContexts = service.getActiveContexts();
        final int activeRefs = indexShard.store().refCount();
        expectThrows(SearchPhaseExecutionException.class,
            () -> client().prepareSearch("index").setQuery(new FailOnRewriteQueryBuilder()).get());
        assertEquals(activeContexts, service.getActiveContexts());
        assertEquals(activeRefs, indexShard.store().refCount());
    }

    public void testSearchWhileIndexDeleted() throws IOException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();

        SearchService service = getInstanceFromNode(SearchService.class);
        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
        IndexShard indexShard = indexService.getShard(0);
        AtomicBoolean running = new AtomicBoolean(true);
        CountDownLatch startGun = new CountDownLatch(1);
        Semaphore semaphore = new Semaphore(Integer.MAX_VALUE);
        final Thread thread = new Thread() {
            @Override
            public void run() {
                startGun.countDown();
                while (running.get()) {
                    service.afterIndexRemoved(indexService.index(), indexService.getIndexSettings(), DELETED);
                    if (randomBoolean()) {
                        // here we trigger some refreshes to ensure the IndexReaders go out of scope, such that we hit an
                        // AlreadyClosedException if we access a search context in a non-sane way
                        try {
                            semaphore.acquire();
                        } catch (InterruptedException e) {
                            throw new AssertionError(e);
                        }
                        client().prepareIndex("index", "type").setSource("field", "value")
                            .setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values()))
                            .execute(new ActionListener<IndexResponse>() {
                                @Override
                                public void onResponse(IndexResponse indexResponse) {
                                    semaphore.release();
                                }

                                @Override
                                public void onFailure(Exception e) {
                                    semaphore.release();
                                }
                            });
                    }
                }
            }
        };
        thread.start();
        startGun.await();
        try {
            final int rounds = scaledRandomIntBetween(100, 10000);
            for (int i = 0; i < rounds; i++) {
                try {
                    SearchPhaseResult searchPhaseResult = service.executeQueryPhase(
                        new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, new SearchSourceBuilder(),
                            new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f),
                        new SearchTask(123L, "", "", "", null));
                    IntArrayList intCursors = new IntArrayList(1);
                    intCursors.add(0);
                    ShardFetchRequest req = new ShardFetchRequest(searchPhaseResult.getRequestId(), intCursors, null /* not a scroll */);
                    service.executeFetchPhase(req, new SearchTask(123L, "", "", "", null));
                } catch (AlreadyClosedException ex) {
                    throw ex;
                } catch (IllegalStateException ex) {
                    assertEquals("search context is already closed can't increment refCount current count [0]", ex.getMessage());
                } catch (SearchContextMissingException ex) {
                    // that's fine
                }
            }
        } finally {
            running.set(false);
            thread.join();
            semaphore.acquire(Integer.MAX_VALUE);
        }
    }

    public void testTimeout() throws IOException {
        createIndex("index");
        final SearchService service = getInstanceFromNode(SearchService.class);
        final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
        final IndexShard indexShard = indexService.getShard(0);
        final SearchContext contextWithDefaultTimeout = service.createContext(
            new ShardSearchLocalRequest(
                indexShard.shardId(),
                1,
                SearchType.DEFAULT,
                new SearchSourceBuilder(),
                new String[0],
                false,
                new AliasFilter(null, Strings.EMPTY_ARRAY),
                1.0f),
            null);
        try {
            // the search context should inherit the default timeout
            assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
        } finally {
            contextWithDefaultTimeout.decRef();
            service.freeContext(contextWithDefaultTimeout.id());
        }

        final long seconds = randomIntBetween(6, 10);
        final SearchContext context = service.createContext(
            new ShardSearchLocalRequest(
                indexShard.shardId(),
                1,
                SearchType.DEFAULT,
                new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds)),
                new String[0],
                false,
                new AliasFilter(null, Strings.EMPTY_ARRAY),
                1.0f),
            null);
        try {
            // the search context should inherit the query timeout
            assertThat(context.timeout(), equalTo(TimeValue.timeValueSeconds(seconds)));
        } finally {
            context.decRef();
            service.freeContext(context.id());
        }
    }

    public static class FailOnRewriteQueryPlugin extends Plugin implements SearchPlugin {
        @Override
        public List<QuerySpec<?>> getQueries() {
            return singletonList(new QuerySpec<>("fail_on_rewrite_query", FailOnRewriteQueryBuilder::new, parseContext -> {
                throw new UnsupportedOperationException("No query parser for this plugin");
            }));
        }
    }

    public static class FailOnRewriteQueryBuilder extends AbstractQueryBuilder<FailOnRewriteQueryBuilder> {

        public FailOnRewriteQueryBuilder(StreamInput in) throws IOException {
            super(in);
        }

        public FailOnRewriteQueryBuilder() {
        }

        @Override
        protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException {
            throw new IllegalStateException("Fail on rewrite phase");
        }

        @Override
        protected void doWriteTo(StreamOutput out) throws IOException {
        }

        @Override
        protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        }

        @Override
        protected Query doToQuery(QueryShardContext context) throws IOException {
            return null;
        }

        @Override
        protected boolean doEquals(FailOnRewriteQueryBuilder other) {
            return false;
        }

        @Override
        protected int doHashCode() {
            return 0;
        }

        @Override
        public String getWriteableName() {
            return null;
        }
    }
}