From bebff47b5b9b27b6bb6156bda2a61aa0e81af3e6 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Sep 2017 16:32:34 -0700 Subject: [PATCH 01/13] File Discovery: Remove fallback with zen discovery (#26667) When adding file based discovery, we added a fallback when the discovery type was set to zen (the default, so everyone got this warning). This commit removes the fallback for 6.0. Setting file discovery should now happen explicitly through the hosts_provider setting. closes #26661 --- plugins/discovery-file/build.gradle | 1 + .../file/FileBasedDiscoveryPlugin.java | 16 ------- .../file/FileBasedDiscoveryPluginTests.java | 47 ------------------- 3 files changed, 1 insertion(+), 63 deletions(-) delete mode 100644 plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java diff --git a/plugins/discovery-file/build.gradle b/plugins/discovery-file/build.gradle index 91457924351bf..145d959fa4100 100644 --- a/plugins/discovery-file/build.gradle +++ b/plugins/discovery-file/build.gradle @@ -52,6 +52,7 @@ setupSeedNodeAndUnicastHostsFile.doLast { integTestCluster { dependsOn setupSeedNodeAndUnicastHostsFile clusterName = 'discovery-file-test-cluster' + setting 'discovery.zen.hosts_provider', 'file' extraConfigFile 'discovery-file/unicast_hosts.txt', srcUnicastHostsFile } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java index 2cda88f796ed9..b5d16a547d514 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java @@ -102,20 +102,4 @@ public Map> getZenHostsProviders(Transpor () -> new FileBasedUnicastHostsProvider( new Environment(settings, configPath), transportService, fileBasedDiscoveryExecutorService)); 
} - - @Override - public Settings additionalSettings() { - // For 5.0, the hosts provider was "zen", but this was before the discovery.zen.hosts_provider - // setting existed. This check looks for the legacy zen, and sets the file hosts provider if not set - String discoveryType = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings); - if (discoveryType.equals("zen")) { - deprecationLogger.deprecated("Using " + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + - " setting to set hosts provider is deprecated. " + - "Set \"" + DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey() + ": file\" instead"); - if (DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.exists(settings) == false) { - return Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file").build(); - } - } - return Settings.EMPTY; - } } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java deleted file mode 100644 index 838d53d2d6221..0000000000000 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginTests.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.discovery.file; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.nio.file.Path; - -public class FileBasedDiscoveryPluginTests extends ESTestCase { - - public void testHostsProviderBwc() { - FileBasedDiscoveryPlugin plugin = new FileBasedDiscoveryPlugin(Settings.EMPTY, createTempDir()); - Settings additionalSettings = plugin.additionalSettings(); - assertEquals("file", additionalSettings.get(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey())); - assertWarnings("Using discovery.type setting to set hosts provider is deprecated. " + - "Set \"discovery.zen.hosts_provider: file\" instead"); - } - - public void testHostsProviderExplicit() { - Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "foo").build(); - FileBasedDiscoveryPlugin plugin = new FileBasedDiscoveryPlugin(settings, createTempDir()); - assertEquals(Settings.EMPTY, plugin.additionalSettings()); - assertWarnings("Using discovery.type setting to set hosts provider is deprecated. " + - "Set \"discovery.zen.hosts_provider: file\" instead"); - } - -} From a1c766c75cec4aedde226f6f9d0e0b60e6f40592 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Sep 2017 17:41:51 -0700 Subject: [PATCH 02/13] Build: Set bwc builds to always set snapshot (#26704) This commit enforces bwc builds always generate snapshot versions, even when testing release versions in CI. 
closes #26702 --- distribution/bwc/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 71c8ce59cb7a8..f2c4a09edd4d8 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -134,6 +134,7 @@ if (enabled) { dependsOn checkoutBwcBranch, writeBuildMetadata dir = checkoutDir tasks = [':distribution:deb:assemble', ':distribution:rpm:assemble', ':distribution:zip:assemble'] + startParameter.systemPropertiesArgs = ['build.snapshot': 'true'] doLast { List missing = [bwcDeb, bwcRpm, bwcZip].grep { file -> false == file.exists() } From 61849a1150ae298fe38667341bc640fecb952a50 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 18 Sep 2017 07:41:18 +0200 Subject: [PATCH 03/13] aggs: Allow aggregation sorting via nested aggregation. The nested aggregator now buffers all bucket ords per parent document and emits all bucket ords for a parent document's nested document once. This way the nested documents document DocIdSetIterator gets used once per bucket instead of wrapping the nested aggregator inside a multi bucket aggregator, which was the current solution upto now. This allows sorting by buckets under a nested bucket. 
Closes #16838 --- .../bucket/nested/NestedAggregator.java | 115 +++++++-- .../nested/NestedAggregatorFactory.java | 6 +- .../metrics/tophits/TopHitsAggregator.java | 9 +- .../bucket/nested/NestedAggregatorTests.java | 218 ++++++++++++++++++ 4 files changed, 321 insertions(+), 27 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 99932bdc2fab7..b39bf864ad2b7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.search.aggregations.bucket.nested; +import com.carrotsearch.hppc.LongArrayList; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; @@ -51,14 +52,19 @@ class NestedAggregator extends BucketsAggregator implements SingleBucketAggregat private final BitSetProducer parentFilter; private final Query childFilter; + private final boolean collectsFromSingleBucket; + + private BufferingNestedLeafBucketCollector bufferingNestedLeafBucketCollector; NestedAggregator(String name, AggregatorFactories factories, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper, - SearchContext context, Aggregator parentAggregator, - List pipelineAggregators, Map metaData) throws IOException { + SearchContext context, Aggregator parentAggregator, + List pipelineAggregators, Map metaData, + boolean collectsFromSingleBucket) throws IOException { super(name, factories, context, parentAggregator, pipelineAggregators, metaData); Query parentFilter = parentObjectMapper != null ? 
parentObjectMapper.nestedTypeFilter() : Queries.newNonNestedFilter(); this.parentFilter = context.bitsetFilterCache().getBitSetProducer(parentFilter); this.childFilter = childObjectMapper.nestedTypeFilter(); + this.collectsFromSingleBucket = collectsFromSingleBucket; } @Override @@ -71,26 +77,38 @@ public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final L final BitSet parentDocs = parentFilter.getBitSet(ctx); final DocIdSetIterator childDocs = childDocsScorer != null ? childDocsScorer.iterator() : null; - return new LeafBucketCollectorBase(sub, null) { - @Override - public void collect(int parentDoc, long bucket) throws IOException { - // if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent - // doc), so we can skip: - if (parentDoc == 0 || parentDocs == null || childDocs == null) { - return; - } + if (collectsFromSingleBucket) { + return new LeafBucketCollectorBase(sub, null) { + @Override + public void collect(int parentDoc, long bucket) throws IOException { + // if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent + // doc), so we can skip: + if (parentDoc == 0 || parentDocs == null || childDocs == null) { + return; + } - final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1); - int childDocId = childDocs.docID(); - if (childDocId <= prevParentDoc) { - childDocId = childDocs.advance(prevParentDoc + 1); - } + final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1); + int childDocId = childDocs.docID(); + if (childDocId <= prevParentDoc) { + childDocId = childDocs.advance(prevParentDoc + 1); + } - for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) { - collectBucket(sub, childDocId, bucket); + for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) { + collectBucket(sub, childDocId, bucket); + } } - } - }; + }; + } else { + doPostCollection(); + return 
bufferingNestedLeafBucketCollector = new BufferingNestedLeafBucketCollector(sub, parentDocs, childDocs); + } + } + + @Override + protected void doPostCollection() throws IOException { + if (bufferingNestedLeafBucketCollector != null) { + bufferingNestedLeafBucketCollector.postCollect(); + } } @Override @@ -104,4 +122,63 @@ public InternalAggregation buildEmptyAggregation() { return new InternalNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); } + class BufferingNestedLeafBucketCollector extends LeafBucketCollectorBase { + + final BitSet parentDocs; + final LeafBucketCollector sub; + final DocIdSetIterator childDocs; + final LongArrayList bucketBuffer = new LongArrayList(); + + int currentParentDoc = -1; + + BufferingNestedLeafBucketCollector(LeafBucketCollector sub, BitSet parentDocs, DocIdSetIterator childDocs) { + super(sub, null); + this.sub = sub; + this.parentDocs = parentDocs; + this.childDocs = childDocs; + } + + @Override + public void collect(int parentDoc, long bucket) throws IOException { + // if parentDoc is 0 then this means that this parent doesn't have child docs (b/c these appear always before the parent + // doc), so we can skip: + if (parentDoc == 0 || parentDocs == null || childDocs == null) { + return; + } + + if (currentParentDoc != parentDoc) { + processChildBuckets(currentParentDoc, bucketBuffer); + currentParentDoc = parentDoc; + } + bucketBuffer.add(bucket); + } + + void processChildBuckets(int parentDoc, LongArrayList buckets) throws IOException { + if (bucketBuffer.isEmpty()) { + return; + } + + + final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1); + int childDocId = childDocs.docID(); + if (childDocId <= prevParentDoc) { + childDocId = childDocs.advance(prevParentDoc + 1); + } + + for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) { + final long[] buffer = buckets.buffer; + final int size = buckets.size(); + for (int i = 0; i < size; i++) { + collectBucket(sub, childDocId, 
buffer[i]); + } + } + bucketBuffer.clear(); + } + + void postCollect() throws IOException { + processChildBuckets(currentParentDoc, bucketBuffer); + } + + } + } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java index b491bf8ff0dc4..dfbe18ba87b4f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorFactory.java @@ -48,13 +48,11 @@ class NestedAggregatorFactory extends AggregatorFactory @Override public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - if (collectsFromSingleBucket == false) { - return asMultiBucketAggregator(this, context, parent); - } if (childObjectMapper == null) { return new Unmapped(name, context, parent, pipelineAggregators, metaData); } - return new NestedAggregator(name, factories, parentObjectMapper, childObjectMapper, context, parent, pipelineAggregators, metaData); + return new NestedAggregator(name, factories, parentObjectMapper, childObjectMapper, context, parent, + pipelineAggregators, metaData, collectsFromSingleBucket); } private static final class Unmapped extends NonCollectingAggregator { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java index aeed0ef250ea4..84dd870e3f06d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java @@ -91,10 +91,6 @@ public boolean needsScores() { public LeafBucketCollector getLeafCollector(final 
LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - for (LongObjectPagedHashMap.Cursor cursor : topDocsCollectors) { - cursor.value.leafCollector = cursor.value.topLevelCollector.getLeafCollector(ctx); - } - return new LeafBucketCollectorBase(sub, null) { Scorer scorer; @@ -103,6 +99,11 @@ public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, public void setScorer(Scorer scorer) throws IOException { this.scorer = scorer; for (LongObjectPagedHashMap.Cursor cursor : topDocsCollectors) { + // Instantiate the leaf collector not in the getLeafCollector(...) method or in the constructor of this + // anonymous class. Otherwise in the case this leaf bucket collector gets invoked with post collection + // then we already have moved on to the next reader and then we may encounter assertion errors or + // incorrect results. + cursor.value.leafCollector = cursor.value.topLevelCollector.getLeafCollector(ctx); cursor.value.leafCollector.setScorer(scorer); } super.setScorer(scorer); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 7000924001f20..f6e18f828045f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -34,21 +35,33 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.InternalMax; +import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.min.Min; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.stream.DoubleStream; public class NestedAggregatorTests extends AggregatorTestCase { @@ -314,6 +327,189 @@ public void testResetRootDocId() throws Exception { } } + public void testNestedOrdering() throws IOException { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocuments(generateBook("1", new String[]{"a"}, new int[]{12, 13, 14})); + iw.addDocuments(generateBook("2", new String[]{"b"}, new 
int[]{5, 50})); + iw.addDocuments(generateBook("3", new String[]{"c"}, new int[]{39, 19})); + iw.addDocuments(generateBook("4", new String[]{"d"}, new int[]{2, 1, 3})); + iw.addDocuments(generateBook("5", new String[]{"a"}, new int[]{70, 10})); + iw.addDocuments(generateBook("6", new String[]{"e"}, new int[]{23, 21})); + iw.addDocuments(generateBook("7", new String[]{"e", "a"}, new int[]{8, 8})); + iw.addDocuments(generateBook("8", new String[]{"f"}, new int[]{12, 14})); + iw.addDocuments(generateBook("9", new String[]{"g", "c", "e"}, new int[]{18, 8})); + } + try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { + MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType1.setName("num_pages"); + MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType(); + fieldType2.setHasDocValues(true); + fieldType2.setName("author"); + + TermsAggregationBuilder termsBuilder = new TermsAggregationBuilder("authors", ValueType.STRING) + .field("author").order(BucketOrder.aggregation("chapters>num_pages.value", true)); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder("chapters", "nested_chapters"); + MaxAggregationBuilder maxAgg = new MaxAggregationBuilder("num_pages").field("num_pages"); + nestedBuilder.subAggregation(maxAgg); + termsBuilder.subAggregation(nestedBuilder); + + Terms terms = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + + assertEquals(7, terms.getBuckets().size()); + assertEquals("authors", terms.getName()); + + Terms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("d", bucket.getKeyAsString()); + Max numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(3, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(1); + assertEquals("f", bucket.getKeyAsString()); + numPages = ((Nested) 
bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(14, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(2); + assertEquals("g", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(18, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(3); + assertEquals("e", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(23, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(4); + assertEquals("c", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(39, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(5); + assertEquals("b", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(50, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(6); + assertEquals("a", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(70, (int) numPages.getValue()); + + // reverse order: + termsBuilder = new TermsAggregationBuilder("authors", ValueType.STRING) + .field("author").order(BucketOrder.aggregation("chapters>num_pages.value", false)); + nestedBuilder = new NestedAggregationBuilder("chapters", "nested_chapters"); + maxAgg = new MaxAggregationBuilder("num_pages").field("num_pages"); + nestedBuilder.subAggregation(maxAgg); + termsBuilder.subAggregation(nestedBuilder); + + terms = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + + assertEquals(7, terms.getBuckets().size()); + assertEquals("authors", terms.getName()); + + bucket = terms.getBuckets().get(0); + assertEquals("a", 
bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(70, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(1); + assertEquals("b", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(50, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(2); + assertEquals("c", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(39, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(3); + assertEquals("e", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(23, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(4); + assertEquals("g", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(18, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(5); + assertEquals("f", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(14, (int) numPages.getValue()); + + bucket = terms.getBuckets().get(6); + assertEquals("d", bucket.getKeyAsString()); + numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(3, (int) numPages.getValue()); + } + } + } + + public void testNestedOrdering_random() throws IOException { + int numBooks = randomIntBetween(32, 512); + List> books = new ArrayList<>(); + for (int i = 0; i < numBooks; i++) { + int numChapters = randomIntBetween(1, 8); + int[] chapters = new int[numChapters]; + for (int j = 0; j < numChapters; j++) { + chapters[j] = randomIntBetween(2, 64); + } + books.add(Tuple.tuple(String.format(Locale.ROOT, 
"%03d", i), chapters)); + } + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + int id = 0; + for (Tuple book : books) { + iw.addDocuments(generateBook( + String.format(Locale.ROOT, "%03d", id), new String[]{book.v1()}, book.v2()) + ); + id++; + } + } + for (Tuple book : books) { + Arrays.sort(book.v2()); + } + books.sort((o1, o2) -> { + int cmp = Integer.compare(o1.v2()[0], o2.v2()[0]); + if (cmp == 0) { + return o1.v1().compareTo(o2.v1()); + } else { + return cmp; + } + }); + try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { + MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType1.setName("num_pages"); + MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType(); + fieldType2.setHasDocValues(true); + fieldType2.setName("author"); + + TermsAggregationBuilder termsBuilder = new TermsAggregationBuilder("authors", ValueType.STRING) + .size(books.size()).field("author") + .order(BucketOrder.compound(BucketOrder.aggregation("chapters>num_pages.value", true), BucketOrder.key(true))); + NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder("chapters", "nested_chapters"); + MinAggregationBuilder minAgg = new MinAggregationBuilder("num_pages").field("num_pages"); + nestedBuilder.subAggregation(minAgg); + termsBuilder.subAggregation(nestedBuilder); + + Terms terms = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + + assertEquals(books.size(), terms.getBuckets().size()); + assertEquals("authors", terms.getName()); + + for (int i = 0; i < books.size(); i++) { + Tuple book = books.get(i); + Terms.Bucket bucket = terms.getBuckets().get(i); + assertEquals(book.v1(), bucket.getKeyAsString()); + Min numPages = ((Nested) bucket.getAggregations().get("chapters")).getAggregations().get("num_pages"); + assertEquals(book.v2()[0], (int) 
numPages.getValue()); + } + } + } + } + private double generateMaxDocs(List documents, int numNestedDocs, int id, String path, String fieldName) { return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName)) .max().orElse(Double.NEGATIVE_INFINITY); @@ -340,4 +536,26 @@ private double[] generateDocuments(List documents, int numNestedDocs, return values; } + private List generateBook(String id, String[] authors, int[] numPages) { + List documents = new ArrayList<>(); + + for (int numPage : numPages) { + Document document = new Document(); + document.add(new Field(UidFieldMapper.NAME, "book#" + id, UidFieldMapper.Defaults.NESTED_FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "__nested_chapters", TypeFieldMapper.Defaults.FIELD_TYPE)); + document.add(new SortedNumericDocValuesField("num_pages", numPage)); + documents.add(document); + } + + Document document = new Document(); + document.add(new Field(UidFieldMapper.NAME, "book#" + id, UidFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "book", TypeFieldMapper.Defaults.FIELD_TYPE)); + for (String author : authors) { + document.add(new SortedSetDocValuesField("author", new BytesRef(author))); + } + documents.add(document); + + return documents; + } + } From ff1e26276d8cd3f04c539951f5023fd1db499ca3 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 20 Sep 2017 10:30:21 +0200 Subject: [PATCH 04/13] Deguice ActionFilter (#26691) Allows to instantiate TransportAction instances without Guice. 
--- .../elasticsearch/action/ActionModule.java | 18 ++++++----- .../action/support/ActionFilters.java | 3 -- .../java/org/elasticsearch/node/Node.java | 10 +++--- .../elasticsearch/plugins/ActionPlugin.java | 3 +- .../cluster/ClusterInfoServiceIT.java | 17 +++++----- .../ReindexFromRemoteWithAuthTests.java | 28 ++++++++++++++--- .../http/ContextAndHeaderTransportIT.java | 31 ++++++++++--------- 7 files changed, 67 insertions(+), 43 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 6cfd89d2d26bd..86582e9b8d046 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -315,6 +315,7 @@ import org.elasticsearch.usage.UsageService; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -341,7 +342,7 @@ public class ActionModule extends AbstractModule { private final SettingsFilter settingsFilter; private final List actionPlugins; private final Map> actions; - private final List> actionFilters; + private final ActionFilters actionFilters; private final AutoCreateIndex autoCreateIndex; private final DestructiveOperations destructiveOperations; private final RestController restController; @@ -503,8 +504,9 @@ public void reg return unmodifiableMap(actions.getRegistry()); } - private List> setupActionFilters(List actionPlugins) { - return unmodifiableList(actionPlugins.stream().flatMap(p -> p.getActionFilters().stream()).collect(Collectors.toList())); + private ActionFilters setupActionFilters(List actionPlugins) { + return new ActionFilters( + Collections.unmodifiableSet(actionPlugins.stream().flatMap(p -> p.getActionFilters().stream()).collect(Collectors.toSet()))); } public void initRestHandlers(Supplier nodesInCluster) { @@ -649,11 +651,7 @@ public void initRestHandlers(Supplier nodesInCluster) { @Override 
protected void configure() { - Multibinder actionFilterMultibinder = Multibinder.newSetBinder(binder(), ActionFilter.class); - for (Class actionFilter : actionFilters) { - actionFilterMultibinder.addBinding().to(actionFilter); - } - bind(ActionFilters.class).asEagerSingleton(); + bind(ActionFilters.class).toInstance(actionFilters); bind(DestructiveOperations.class).toInstance(destructiveOperations); if (false == transportClient) { @@ -676,6 +674,10 @@ protected void configure() { } } + public ActionFilters getActionFilters() { + return actionFilters; + } + public RestController getRestController() { return restController; } diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilters.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilters.java index 426129a66d6ab..c66bac31a3dee 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ActionFilters.java +++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilters.java @@ -19,8 +19,6 @@ package org.elasticsearch.action.support; -import org.elasticsearch.common.inject.Inject; - import java.util.Arrays; import java.util.Comparator; import java.util.Set; @@ -32,7 +30,6 @@ public class ActionFilters { private final ActionFilter[] filters; - @Inject public ActionFilters(Set actionFilters) { this.filters = actionFilters.toArray(new ActionFilter[actionFilters.size()]); Arrays.sort(filters, new Comparator() { diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index cee85c9619912..5688dfc3f875e 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -361,10 +361,6 @@ protected Node(final Environment environment, Collection CircuitBreakerService circuitBreakerService = createCircuitBreakerService(settingsModule.getSettings(), settingsModule.getClusterSettings()); resourcesToClose.add(circuitBreakerService); - ActionModule actionModule = 
new ActionModule(false, settings, clusterModule.getIndexNameExpressionResolver(), - settingsModule.getIndexScopedSettings(), settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), - threadPool, pluginsService.filterPlugins(ActionPlugin.class), client, circuitBreakerService, usageService); - modules.add(actionModule); modules.add(new GatewayModule()); @@ -400,6 +396,12 @@ protected Node(final Environment environment, Collection scriptModule.getScriptService(), xContentRegistry, environment, nodeEnvironment, namedWriteableRegistry).stream()) .collect(Collectors.toList()); + + ActionModule actionModule = new ActionModule(false, settings, clusterModule.getIndexNameExpressionResolver(), + settingsModule.getIndexScopedSettings(), settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), + threadPool, pluginsService.filterPlugins(ActionPlugin.class), client, circuitBreakerService, usageService); + modules.add(actionModule); + final RestController restController = actionModule.getRestController(); final NetworkModule networkModule = new NetworkModule(settings, false, pluginsService.filterPlugins(NetworkPlugin.class), threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService, restController); diff --git a/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index 346bf491d619b..377da56f6018b 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; @@ -66,7 +65,7 @@ public interface ActionPlugin { /** * Action filters added 
by this plugin. */ - default List> getActionFilters() { + default List getActionFilters() { return Collections.emptyList(); } /** diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index b5e7a1201f969..0f16b4d7f1169 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.support.ActionFilter; @@ -35,7 +34,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; @@ -48,9 +46,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.ConnectionProfile; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; @@ -80,16 +75,22 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { public static class TestPlugin extends Plugin implements ActionPlugin { + + private final BlockingActionFilter blockingActionFilter; + + public 
TestPlugin(Settings settings) { + blockingActionFilter = new BlockingActionFilter(settings); + } + @Override - public List> getActionFilters() { - return singletonList(BlockingActionFilter.class); + public List getActionFilters() { + return singletonList(blockingActionFilter); } } public static class BlockingActionFilter extends org.elasticsearch.action.support.ActionFilter.Simple { private Set blockedActions = emptySet(); - @Inject public BlockingActionFilter(Settings settings) { super(settings); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java index c4b5c26e5c4ef..31077c405d8e1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; @@ -29,23 +30,31 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import 
org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Netty4Plugin; +import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.Before; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -129,9 +138,21 @@ public void testReindexWithBadAuthentication() throws Exception { * Plugin that demands authentication. */ public static class TestPlugin extends Plugin implements ActionPlugin { + + private final SetOnce testFilter = new SetOnce<>(); + + @Override + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + testFilter.set(new ReindexFromRemoteWithAuthTests.TestFilter(threadPool)); + return Collections.emptyList(); + } + @Override - public List> getActionFilters() { - return singletonList(ReindexFromRemoteWithAuthTests.TestFilter.class); + public List getActionFilters() { + return singletonList(testFilter.get()); } @Override @@ -153,7 +174,6 @@ public static class TestFilter implements ActionFilter { private static final String EXAMPLE_HEADER = "Example-Header"; private final ThreadContext context; - @Inject public TestFilter(ThreadPool threadPool) { context = threadPool.getThreadContext(); } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java 
b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index 28b5825513f8f..749c03598a378 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.http; import org.apache.http.message.BasicHeader; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -31,12 +32,14 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.Response; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; @@ -46,8 +49,10 @@ import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; import org.junit.After; import org.junit.Before; @@ -282,21 +287,20 
@@ private Client transportClient() { public static class ActionLoggingPlugin extends Plugin implements ActionPlugin { - @Override - public Collection createGuiceModules() { - return Collections.singletonList(new ActionLoggingModule()); - } + private final SetOnce loggingFilter = new SetOnce<>(); @Override - public List> getActionFilters() { - return singletonList(LoggingFilter.class); + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + loggingFilter.set(new LoggingFilter(clusterService.getSettings(), threadPool)); + return Collections.emptyList(); } - } - public static class ActionLoggingModule extends AbstractModule { @Override - protected void configure() { - bind(LoggingFilter.class).asEagerSingleton(); + public List getActionFilters() { + return singletonList(loggingFilter.get()); } } @@ -305,7 +309,6 @@ public static class LoggingFilter extends ActionFilter.Simple { private final ThreadPool threadPool; - @Inject public LoggingFilter(Settings settings, ThreadPool pool) { super(settings); this.threadPool = pool; From b3819e7f308f7cc6017d4473c2b0817f2efc2b7c Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 20 Sep 2017 11:36:10 +0200 Subject: [PATCH 05/13] Make RestHighLevelClient's Request class public (#26627) Request class is currently package protected, making it difficult for the users to extend the RestHighLevelClient and to use its protected methods to execute requests. This commit makes the Request class public and changes few methods of RestHighLevelClient to be protected. 
--- .../org/elasticsearch/client/Request.java | 41 ++++-- .../client/RestHighLevelClient.java | 10 +- .../CustomRestHighLevelClientTests.java | 67 ++++++--- .../elasticsearch/client/RequestTests.java | 132 +++++++++++------- .../client/RestHighLevelClientExtTests.java | 2 +- 5 files changed, 165 insertions(+), 87 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index 77e501551cd53..7a95553c3c003 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -63,30 +63,47 @@ import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.StringJoiner; -final class Request { +public final class Request { static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON; - final String method; - final String endpoint; - final Map params; - final HttpEntity entity; + private final String method; + private final String endpoint; + private final Map parameters; + private final HttpEntity entity; - Request(String method, String endpoint, Map params, HttpEntity entity) { - this.method = method; - this.endpoint = endpoint; - this.params = params; + public Request(String method, String endpoint, Map parameters, HttpEntity entity) { + this.method = Objects.requireNonNull(method, "method cannot be null"); + this.endpoint = Objects.requireNonNull(endpoint, "endpoint cannot be null"); + this.parameters = Objects.requireNonNull(parameters, "parameters cannot be null"); this.entity = entity; } + public String getMethod() { + return method; + } + + public String getEndpoint() { + return endpoint; + } + + public Map getParameters() { + return parameters; + } + + public HttpEntity getEntity() { + return entity; + } + @Override public String toString() { return "Request{" + "method='" + method 
+ '\'' + ", endpoint='" + endpoint + '\'' + - ", params=" + params + + ", params=" + parameters + ", hasBody=" + (entity != null) + '}'; } @@ -233,7 +250,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { static Request exists(GetRequest getRequest) { Request request = get(getRequest); - return new Request(HttpHead.METHOD_NAME, request.endpoint, request.params, null); + return new Request(HttpHead.METHOD_NAME, request.endpoint, request.parameters, null); } static Request get(GetRequest getRequest) { @@ -381,7 +398,7 @@ static String endpoint(String... parts) { * @return the {@link ContentType} */ @SuppressForbidden(reason = "Only allowed place to convert a XContentType to a ContentType") - static ContentType createContentType(final XContentType xContentType) { + public static ContentType createContentType(final XContentType xContentType) { return ContentType.create(xContentType.mediaTypeWithoutParameters(), (Charset) null); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 27d73dabadf60..25697abb82edf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -425,7 +425,7 @@ protected Resp performRequest(Req request, Request req = requestConverter.apply(request); Response response; try { - response = client.performRequest(req.method, req.endpoint, req.params, req.entity, headers); + response = client.performRequest(req.getMethod(), req.getEndpoint(), req.getParameters(), req.getEntity(), headers); } catch (ResponseException e) { if (ignores.contains(e.getResponse().getStatusLine().getStatusCode())) { try { @@ -474,7 +474,7 @@ protected void performRequestAsync(Req request } ResponseListener responseListener = wrapResponseListener(responseConverter, listener, 
ignores); - client.performRequestAsync(req.method, req.endpoint, req.params, req.entity, responseListener, headers); + client.performRequestAsync(req.getMethod(), req.getEndpoint(), req.getParameters(), req.getEntity(), responseListener, headers); } ResponseListener wrapResponseListener(CheckedFunction responseConverter, @@ -522,7 +522,7 @@ public void onFailure(Exception exception) { * that wraps the original {@link ResponseException}. The potential exception obtained while parsing is added to the returned * exception as a suppressed exception. This method is guaranteed to not throw any exception eventually thrown while parsing. */ - ElasticsearchStatusException parseResponseException(ResponseException responseException) { + protected ElasticsearchStatusException parseResponseException(ResponseException responseException) { Response response = responseException.getResponse(); HttpEntity entity = response.getEntity(); ElasticsearchStatusException elasticsearchException; @@ -542,8 +542,8 @@ ElasticsearchStatusException parseResponseException(ResponseException responseEx return elasticsearchException; } - Resp parseEntity( - HttpEntity entity, CheckedFunction entityParser) throws IOException { + protected Resp parseEntity(final HttpEntity entity, + final CheckedFunction entityParser) throws IOException { if (entity == null) { throw new IllegalStateException("Response body expected but not returned"); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java index a7aac3ec037f3..f8c191252804a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -22,14 +22,12 @@ import org.apache.http.Header; import org.apache.http.HttpEntity; import 
org.apache.http.HttpHost; -import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.RequestLine; import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; import org.apache.http.message.BasicHeader; -import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicRequestLine; import org.apache.http.message.BasicStatusLine; import org.apache.lucene.util.BytesRef; @@ -38,6 +36,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.main.MainRequest; import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.xcontent.XContentHelper; @@ -48,11 +52,14 @@ import java.io.IOException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; +import java.util.Arrays; import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.elasticsearch.client.ESRestHighLevelClientTestCase.execute; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyMapOf; import static org.mockito.Matchers.anyObject; @@ -60,6 +67,7 @@ import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * Test and demonstrates how {@link RestHighLevelClient} can be extended to support custom 
endpoints. @@ -92,31 +100,45 @@ public void testCustomEndpoint() throws IOException { final MainRequest request = new MainRequest(); final Header header = new BasicHeader("node_name", randomAlphaOfLengthBetween(1, 10)); - MainResponse response = execute(request, restHighLevelClient::custom, restHighLevelClient::customAsync, header); + MainResponse response = restHighLevelClient.custom(request, header); assertEquals(header.getValue(), response.getNodeName()); - response = execute(request, restHighLevelClient::customAndParse, restHighLevelClient::customAndParseAsync, header); + response = restHighLevelClient.customAndParse(request, header); assertEquals(header.getValue(), response.getNodeName()); } + public void testCustomEndpointAsync() throws Exception { + final MainRequest request = new MainRequest(); + final Header header = new BasicHeader("node_name", randomAlphaOfLengthBetween(1, 10)); + + PlainActionFuture future = PlainActionFuture.newFuture(); + restHighLevelClient.customAsync(request, future, header); + assertEquals(header.getValue(), future.get().getNodeName()); + + future = PlainActionFuture.newFuture(); + restHighLevelClient.customAndParseAsync(request, future, header); + assertEquals(header.getValue(), future.get().getNodeName()); + } + /** * The {@link RestHighLevelClient} must declare the following execution methods using the protected modifier * so that they can be used by subclasses to implement custom logic. 
*/ @SuppressForbidden(reason = "We're forced to uses Class#getDeclaredMethods() here because this test checks protected methods") public void testMethodsVisibility() throws ClassNotFoundException { - String[] methodNames = new String[]{"performRequest", "performRequestAndParseEntity", "performRequestAsync", - "performRequestAsyncAndParseEntity"}; - for (String methodName : methodNames) { - boolean found = false; - for (Method method : RestHighLevelClient.class.getDeclaredMethods()) { - if (method.getName().equals(methodName)) { - assertTrue("Method " + methodName + " must be protected", Modifier.isProtected(method.getModifiers())); - found = true; - } - } - assertTrue("Failed to find method " + methodName, found); - } + final String[] methodNames = new String[]{"performRequest", + "performRequestAsync", + "performRequestAndParseEntity", + "performRequestAsyncAndParseEntity", + "parseEntity", + "parseResponseException"}; + + final List protectedMethods = Arrays.stream(RestHighLevelClient.class.getDeclaredMethods()) + .filter(method -> Modifier.isProtected(method.getModifiers())) + .map(Method::getName) + .collect(Collectors.toList()); + + assertThat(protectedMethods, containsInAnyOrder(methodNames)); } /** @@ -135,15 +157,20 @@ private Void mockPerformRequestAsync(Header httpHeader, ResponseListener respons * Mocks the synchronous request execution like if it was executed by Elasticsearch. 
*/ private Response mockPerformRequest(Header httpHeader) throws IOException { + final Response mockResponse = mock(Response.class); + when(mockResponse.getHost()).thenReturn(new HttpHost("localhost", 9200)); + ProtocolVersion protocol = new ProtocolVersion("HTTP", 1, 1); - HttpResponse httpResponse = new BasicHttpResponse(new BasicStatusLine(protocol, 200, "OK")); + when(mockResponse.getStatusLine()).thenReturn(new BasicStatusLine(protocol, 200, "OK")); MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT, true); BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef(); - httpResponse.setEntity(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); + when(mockResponse.getEntity()).thenReturn(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); RequestLine requestLine = new BasicRequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol); - return new Response(requestLine, new HttpHost("localhost", 9200), httpResponse); + when(mockResponse.getRequestLine()).thenReturn(requestLine); + + return mockResponse; } /** diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index f7996bec924ef..8f52eb37fe95d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -21,6 +21,8 @@ import org.apache.http.HttpEntity; import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; @@ -64,6 +66,8 @@ import java.io.IOException; import java.io.InputStream; +import java.lang.reflect.Constructor; +import 
java.lang.reflect.Modifier; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -77,20 +81,50 @@ public class RequestTests extends ESTestCase { + public void testConstructor() throws Exception { + final String method = randomFrom("GET", "PUT", "POST", "HEAD", "DELETE"); + final String endpoint = randomAlphaOfLengthBetween(1, 10); + final Map parameters = singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)); + final HttpEntity entity = randomBoolean() ? new StringEntity(randomAlphaOfLengthBetween(1, 100), ContentType.TEXT_PLAIN) : null; + + NullPointerException e = expectThrows(NullPointerException.class, () -> new Request(null, endpoint, parameters, entity)); + assertEquals("method cannot be null", e.getMessage()); + + e = expectThrows(NullPointerException.class, () -> new Request(method, null, parameters, entity)); + assertEquals("endpoint cannot be null", e.getMessage()); + + e = expectThrows(NullPointerException.class, () -> new Request(method, endpoint, null, entity)); + assertEquals("parameters cannot be null", e.getMessage()); + + final Request request = new Request(method, endpoint, parameters, entity); + assertEquals(method, request.getMethod()); + assertEquals(endpoint, request.getEndpoint()); + assertEquals(parameters, request.getParameters()); + assertEquals(entity, request.getEntity()); + + final Constructor[] constructors = Request.class.getConstructors(); + assertEquals("Expected only 1 constructor", 1, constructors.length); + assertTrue("Request constructor is not public", Modifier.isPublic(constructors[0].getModifiers())); + } + + public void testClassVisibility() throws Exception { + assertTrue("Request class is not public", Modifier.isPublic(Request.class.getModifiers())); + } + public void testPing() { Request request = Request.ping(); - assertEquals("/", request.endpoint); - assertEquals(0, request.params.size()); - assertNull(request.entity); - assertEquals("HEAD", request.method); + assertEquals("/", 
request.getEndpoint()); + assertEquals(0, request.getParameters().size()); + assertNull(request.getEntity()); + assertEquals("HEAD", request.getMethod()); } public void testInfo() { Request request = Request.info(); - assertEquals("/", request.endpoint); - assertEquals(0, request.params.size()); - assertNull(request.entity); - assertEquals("GET", request.method); + assertEquals("/", request.getEndpoint()); + assertEquals(0, request.getParameters().size()); + assertNull(request.getEntity()); + assertEquals("GET", request.getMethod()); } public void testGet() { @@ -124,10 +158,10 @@ public void testDelete() throws IOException { } Request request = Request.delete(deleteRequest); - assertEquals("/" + index + "/" + type + "/" + id, request.endpoint); - assertEquals(expectedParams, request.params); - assertEquals("DELETE", request.method); - assertNull(request.entity); + assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertEquals("DELETE", request.getMethod()); + assertNull(request.getEntity()); } public void testExists() { @@ -200,10 +234,10 @@ private static void getAndExistsTest(Function requestConver } } Request request = requestConverter.apply(getRequest); - assertEquals("/" + index + "/" + type + "/" + id, request.endpoint); - assertEquals(expectedParams, request.params); - assertNull(request.entity); - assertEquals(method, request.method); + assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertNull(request.getEntity()); + assertEquals(method, request.getMethod()); } public void testIndex() throws IOException { @@ -267,16 +301,16 @@ public void testIndex() throws IOException { Request request = Request.index(indexRequest); if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) { - assertEquals("/" + index + "/" + type + "/" + id + "/_create", request.endpoint); + assertEquals("/" + index + 
"/" + type + "/" + id + "/_create", request.getEndpoint()); } else if (id != null) { - assertEquals("/" + index + "/" + type + "/" + id, request.endpoint); + assertEquals("/" + index + "/" + type + "/" + id, request.getEndpoint()); } else { - assertEquals("/" + index + "/" + type, request.endpoint); + assertEquals("/" + index + "/" + type, request.getEndpoint()); } - assertEquals(expectedParams, request.params); - assertEquals(method, request.method); + assertEquals(expectedParams, request.getParameters()); + assertEquals(method, request.getMethod()); - HttpEntity entity = request.entity; + HttpEntity entity = request.getEntity(); assertTrue(entity instanceof ByteArrayEntity); assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue()); try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) { @@ -367,11 +401,11 @@ public void testUpdate() throws IOException { } Request request = Request.update(updateRequest); - assertEquals("/" + index + "/" + type + "/" + id + "/_update", request.endpoint); - assertEquals(expectedParams, request.params); - assertEquals("POST", request.method); + assertEquals("/" + index + "/" + type + "/" + id + "/_update", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertEquals("POST", request.getMethod()); - HttpEntity entity = request.entity; + HttpEntity entity = request.getEntity(); assertTrue(entity instanceof ByteArrayEntity); UpdateRequest parsedUpdateRequest = new UpdateRequest(); @@ -485,12 +519,12 @@ public void testBulk() throws IOException { } Request request = Request.bulk(bulkRequest); - assertEquals("/_bulk", request.endpoint); - assertEquals(expectedParams, request.params); - assertEquals("POST", request.method); - assertEquals(xContentType.mediaTypeWithoutParameters(), request.entity.getContentType().getValue()); - byte[] content = new byte[(int) request.entity.getContentLength()]; - try (InputStream 
inputStream = request.entity.getContent()) { + assertEquals("/_bulk", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertEquals("POST", request.getMethod()); + assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + byte[] content = new byte[(int) request.getEntity().getContentLength()]; + try (InputStream inputStream = request.getEntity().getContent()) { Streams.readFully(inputStream, content); } @@ -541,7 +575,7 @@ public void testBulkWithDifferentContentTypes() throws IOException { bulkRequest.add(new DeleteRequest("index", "type", "2")); Request request = Request.bulk(bulkRequest); - assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), request.entity.getContentType().getValue()); + assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } { XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); @@ -551,7 +585,7 @@ public void testBulkWithDifferentContentTypes() throws IOException { bulkRequest.add(new DeleteRequest("index", "type", "2")); Request request = Request.bulk(bulkRequest); - assertEquals(xContentType.mediaTypeWithoutParameters(), request.entity.getContentType().getValue()); + assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } { XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); @@ -563,7 +597,7 @@ public void testBulkWithDifferentContentTypes() throws IOException { } Request request = Request.bulk(new BulkRequest().add(updateRequest)); - assertEquals(xContentType.mediaTypeWithoutParameters(), request.entity.getContentType().getValue()); + assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } { BulkRequest bulkRequest = new BulkRequest(); @@ -712,12 +746,12 @@ public void testSearch() throws Exception { endpoint.add(type); } 
endpoint.add("_search"); - assertEquals(endpoint.toString(), request.endpoint); - assertEquals(expectedParams, request.params); + assertEquals(endpoint.toString(), request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); if (searchSourceBuilder == null) { - assertNull(request.entity); + assertNull(request.getEntity()); } else { - assertToXContentBody(searchSourceBuilder, request.entity); + assertToXContentBody(searchSourceBuilder, request.getEntity()); } } @@ -728,11 +762,11 @@ public void testSearchScroll() throws IOException { searchScrollRequest.scroll(randomPositiveTimeValue()); } Request request = Request.searchScroll(searchScrollRequest); - assertEquals("GET", request.method); - assertEquals("/_search/scroll", request.endpoint); - assertEquals(0, request.params.size()); - assertToXContentBody(searchScrollRequest, request.entity); - assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.entity.getContentType().getValue()); + assertEquals("GET", request.getMethod()); + assertEquals("/_search/scroll", request.getEndpoint()); + assertEquals(0, request.getParameters().size()); + assertToXContentBody(searchScrollRequest, request.getEntity()); + assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } public void testClearScroll() throws IOException { @@ -742,11 +776,11 @@ public void testClearScroll() throws IOException { clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10)); } Request request = Request.clearScroll(clearScrollRequest); - assertEquals("DELETE", request.method); - assertEquals("/_search/scroll", request.endpoint); - assertEquals(0, request.params.size()); - assertToXContentBody(clearScrollRequest, request.entity); - assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.entity.getContentType().getValue()); + assertEquals("DELETE", request.getMethod()); + assertEquals("/_search/scroll", 
request.getEndpoint()); + assertEquals(0, request.getParameters().size()); + assertToXContentBody(clearScrollRequest, request.getEntity()); + assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java index 3760dc93d5263..b5fb98a3bdf5e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java @@ -36,7 +36,7 @@ import static org.mockito.Mockito.mock; /** - * This test works against a {@link RestHighLevelClient} subclass that simulats how custom response sections returned by + * This test works against a {@link RestHighLevelClient} subclass that simulates how custom response sections returned by * Elasticsearch plugins can be parsed using the high level client. 
*/ public class RestHighLevelClientExtTests extends ESTestCase { From 5f407062ad0918eb0ad556c533c2e3cbfe33895d Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 20 Sep 2017 12:51:58 +0200 Subject: [PATCH 06/13] Refactoring of Gateway*** classes (#26706) - Removes mutual dependency between GatewayMetaState and TransportNodesListGatewayMetaState - Deguices MetaDataIndexUpgradeService - Deguices GatewayMetaState - Makes Gateway the master-level component that is only responsible for coordinating the state recovery --- .../metadata/MetaDataIndexUpgradeService.java | 4 ---- .../java/org/elasticsearch/gateway/Gateway.java | 17 ++--------------- .../elasticsearch/gateway/GatewayMetaState.java | 13 ++++--------- .../elasticsearch/gateway/GatewayModule.java | 1 - .../elasticsearch/gateway/GatewayService.java | 4 +++- .../elasticsearch/gateway/MetaStateService.java | 2 +- .../TransportNodesListGatewayMetaState.java | 9 +++------ .../main/java/org/elasticsearch/node/Node.java | 9 +++++++-- .../RecoveryWithUnsupportedIndicesIT.java | 2 +- 9 files changed, 21 insertions(+), 40 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 269657367dcfa..2ff5fd5c2b217 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -18,13 +18,11 @@ */ package org.elasticsearch.cluster.metadata; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.IndexScopedSettings; 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -35,7 +33,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.plugins.Plugin; import java.util.AbstractMap; import java.util.Collection; @@ -59,7 +56,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { private final IndexScopedSettings indexScopedSettings; private final UnaryOperator upgraders; - @Inject public MetaDataIndexUpgradeService(Settings settings, NamedXContentRegistry xContentRegistry, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings, Collection> indexMetaDataUpgraders) { diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index 2e258ca54de69..f4d191ac28a8a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -23,9 +23,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; @@ -39,27 +37,23 @@ import java.util.Arrays; import java.util.Map; -public class Gateway extends AbstractComponent implements ClusterStateApplier { +public class Gateway extends AbstractComponent { private final ClusterService clusterService; - private final GatewayMetaState metaState; - private final TransportNodesListGatewayMetaState listGatewayMetaState; private final int 
minimumMasterNodes; private final IndicesService indicesService; - public Gateway(Settings settings, ClusterService clusterService, GatewayMetaState metaState, + public Gateway(Settings settings, ClusterService clusterService, TransportNodesListGatewayMetaState listGatewayMetaState, IndicesService indicesService) { super(settings); this.indicesService = indicesService; this.clusterService = clusterService; - this.metaState = metaState; this.listGatewayMetaState = listGatewayMetaState; this.minimumMasterNodes = ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(settings); - clusterService.addLowPriorityApplier(this); } public void performStateRecovery(final GatewayStateRecoveredListener listener) throws GatewayException { @@ -174,13 +168,6 @@ private void logInvalidSetting(String settingType, Map.Entry e, ex); } - @Override - public void applyClusterState(final ClusterChangedEvent event) { - // order is important, first metaState, and then shardsState - // so dangling indices will be recorded - metaState.applyClusterState(event); - } - public interface GatewayStateRecoveredListener { void onSuccess(ClusterState build); diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 9d57392030ce6..719626b7e1870 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.IndexFolderUpgrader; @@ -69,15 +68,11 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateA private volatile Set 
previouslyWrittenIndices = emptySet(); - @Inject public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService, - TransportNodesListGatewayMetaState nodesListGatewayMetaState, - MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) - throws Exception { + MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) throws IOException { super(settings); this.nodeEnv = nodeEnv; this.metaStateService = metaStateService; - nodesListGatewayMetaState.init(this); if (DiscoveryNode.isDataNode(settings)) { ensureNoPre019ShardState(nodeEnv); @@ -210,7 +205,7 @@ protected static boolean isDataOnlyNode(ClusterState state) { /** * Throws an IAE if a pre 0.19 state is detected */ - private void ensureNoPre019State() throws Exception { + private void ensureNoPre019State() throws IOException { for (Path dataLocation : nodeEnv.nodeDataPaths()) { final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME); if (!Files.exists(stateLocation)) { @@ -242,7 +237,7 @@ private void ensureNoPre019State() throws Exception { */ static MetaData upgradeMetaData(MetaData metaData, MetaDataIndexUpgradeService metaDataIndexUpgradeService, - MetaDataUpgrader metaDataUpgrader) throws Exception { + MetaDataUpgrader metaDataUpgrader) throws IOException { // upgrade index meta data boolean changed = false; final MetaData.Builder upgradedMetaData = MetaData.builder(metaData); @@ -288,7 +283,7 @@ private static boolean applyPluginUpgraders(ImmutableOpenMap list(String[] nodesIds, @Nullable TimeValue timeout) { diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 5688dfc3f875e..9ad403e2d4cda 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -100,6 +100,7 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; 
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; +import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.recovery.PeerRecoverySourceService; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -416,6 +417,10 @@ protected Node(final Environment environment, Collection Collection> indexMetaDataUpgraders = pluginsService.filterPlugins(Plugin.class).stream() .map(Plugin::getIndexMetaDataUpgrader).collect(Collectors.toList()); final MetaDataUpgrader metaDataUpgrader = new MetaDataUpgrader(customMetaDataUpgraders, indexTemplateMetaDataUpgraders); + final MetaDataIndexUpgradeService metaDataIndexUpgradeService = new MetaDataIndexUpgradeService(settings, xContentRegistry, + indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings(), indexMetaDataUpgraders); + final GatewayMetaState gatewayMetaState = new GatewayMetaState(settings, nodeEnvironment, metaStateService, + metaDataIndexUpgradeService, metaDataUpgrader); new TemplateUpgradeService(settings, client, clusterService, threadPool, indexTemplateMetaDataUpgraders); final Transport transport = networkModule.getTransportSupplier().get(); final TransportService transportService = newTransportService(settings, transport, threadPool, @@ -475,9 +480,9 @@ protected Node(final Environment environment, Collection b.bind(TransportService.class).toInstance(transportService); b.bind(NetworkService.class).toInstance(networkService); b.bind(UpdateHelper.class).toInstance(new UpdateHelper(settings, scriptModule.getScriptService())); - b.bind(MetaDataIndexUpgradeService.class).toInstance(new MetaDataIndexUpgradeService(settings, xContentRegistry, - indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings(), indexMetaDataUpgraders)); + 
b.bind(MetaDataIndexUpgradeService.class).toInstance(metaDataIndexUpgradeService); b.bind(ClusterInfoService.class).toInstance(clusterInfoService); + b.bind(GatewayMetaState.class).toInstance(gatewayMetaState); b.bind(Discovery.class).toInstance(discoveryModule.getDiscovery()); { RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index 50f328db39306..51ff79a4a2fa2 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -91,7 +91,7 @@ public void testUpgradeStartClusterOn_0_20_6() throws Exception { internalCluster().startNode(nodeSettings); fail(); } catch (Exception ex) { - assertThat(ex.getMessage(), containsString(" was created before v2.0.0.beta1 and wasn't upgraded")); + assertThat(ex.getCause().getCause().getMessage(), containsString(" was created before v2.0.0.beta1 and wasn't upgraded")); } } } From 22e200e79a461f873fd5acbf7b549c4a7fc83bf8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 20 Sep 2017 14:24:30 +0200 Subject: [PATCH 07/13] Remove deprecated type and slop field in MatchQueryBuilder (#26720) The `type` field has been deprecated in 5.0 and can be removed. It has been replaced by using the MatchPhraseQueryBuilder or the MatchPhrasePrefixQueryBuilder. The `slop` field has also been deprecated and can be removed, the phrase and phrase prefix query builders still provide this parameter. 
--- .../index/query/MatchQueryBuilder.java | 117 +++--------------- .../index/query/MatchQueryBuilderTests.java | 87 ------------- .../highlight/HighlighterSearchIT.java | 15 ++- .../search/query/MultiMatchQueryIT.java | 12 +- .../search/query/SearchQueryIT.java | 18 +-- .../script/mustache/SearchTemplateIT.java | 17 ++- 6 files changed, 47 insertions(+), 219 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 029bb8ab64e0b..1b126edbd124e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; import java.io.IOException; -import java.util.Locale; import java.util.Objects; /** @@ -43,7 +42,6 @@ * result of the analysis. */ public class MatchQueryBuilder extends AbstractQueryBuilder { - public static final ParseField SLOP_FIELD = new ParseField("slop", "phrase_slop").withAllDeprecated("match_phrase query"); public static final ParseField ZERO_TERMS_QUERY_FIELD = new ParseField("zero_terms_query"); public static final ParseField CUTOFF_FREQUENCY_FIELD = new ParseField("cutoff_frequency"); public static final ParseField LENIENT_FIELD = new ParseField("lenient"); @@ -54,7 +52,6 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { public static final ParseField MAX_EXPANSIONS_FIELD = new ParseField("max_expansions"); public static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length"); public static final ParseField ANALYZER_FIELD = new ParseField("analyzer"); - public static final ParseField TYPE_FIELD = new ParseField("type").withAllDeprecated("match_phrase and match_phrase_prefix query"); public static final ParseField QUERY_FIELD = new ParseField("query"); public static final ParseField GENERATE_SYNONYMS_PHRASE_QUERY = new 
ParseField("auto_generate_synonyms_phrase_query"); @@ -64,24 +61,14 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { /** The default mode terms are combined in a match query */ public static final Operator DEFAULT_OPERATOR = Operator.OR; - /** The default mode match query type */ - @Deprecated - public static final MatchQuery.Type DEFAULT_TYPE = MatchQuery.Type.BOOLEAN; - private final String fieldName; private final Object value; - @Deprecated - private MatchQuery.Type type = DEFAULT_TYPE; - private Operator operator = DEFAULT_OPERATOR; private String analyzer; - @Deprecated - private int slop = MatchQuery.DEFAULT_PHRASE_SLOP; - private Fuzziness fuzziness = null; private int prefixLength = FuzzyQuery.defaultPrefixLength; @@ -123,9 +110,15 @@ public MatchQueryBuilder(StreamInput in) throws IOException { super(in); fieldName = in.readString(); value = in.readGenericValue(); - type = MatchQuery.Type.readFromStream(in); + // TODO lower this version once this has been backported to 6.0.0 + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + MatchQuery.Type.readFromStream(in); // deprecated type + } operator = Operator.readFromStream(in); - slop = in.readVInt(); + // TODO lower this version once this has been backported to 6.0.0 + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + in.readVInt(); // deprecated slop + } prefixLength = in.readVInt(); maxExpansions = in.readVInt(); fuzzyTranspositions = in.readBoolean(); @@ -146,9 +139,15 @@ public MatchQueryBuilder(StreamInput in) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeGenericValue(value); - type.writeTo(out); + // TODO lower this version once this has been backported to 6.0.0 + if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + MatchQuery.Type.BOOLEAN.writeTo(out); // deprecated type + } operator.writeTo(out); - out.writeVInt(slop); + // TODO lower this version once this has been backported to 6.0.0 + if 
(out.getVersion().before(Version.V_7_0_0_alpha1)) { + out.writeVInt(MatchQuery.DEFAULT_PHRASE_SLOP); // deprecated slop + } out.writeVInt(prefixLength); out.writeVInt(maxExpansions); out.writeBoolean(fuzzyTranspositions); @@ -175,34 +174,6 @@ public Object value() { return this.value; } - /** - * Sets the type of the text query. - * - * @deprecated Use {@link MatchPhraseQueryBuilder} for phrase - * queries and {@link MatchPhrasePrefixQueryBuilder} for - * phrase_prefix queries - */ - @Deprecated - public MatchQueryBuilder type(MatchQuery.Type type) { - if (type == null) { - throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null"); - } - this.type = type; - return this; - } - - /** - * Get the type of the query. - * - * @deprecated Use {@link MatchPhraseQueryBuilder} for phrase - * queries and {@link MatchPhrasePrefixQueryBuilder} for - * phrase_prefix queries - */ - @Deprecated - public MatchQuery.Type type() { - return this.type; - } - /** Sets the operator to use when using a boolean query. Defaults to OR. */ public MatchQueryBuilder operator(Operator operator) { if (operator == null) { @@ -231,30 +202,6 @@ public String analyzer() { return this.analyzer; } - /** - * Sets a slop factor for phrase queries - * - * @deprecated for phrase queries use {@link MatchPhraseQueryBuilder} - */ - @Deprecated - public MatchQueryBuilder slop(int slop) { - if (slop < 0 ) { - throw new IllegalArgumentException("No negative slop allowed."); - } - this.slop = slop; - return this; - } - - /** - * Get the slop factor for phrase queries. - * - * @deprecated for phrase queries use {@link MatchPhraseQueryBuilder} - */ - @Deprecated - public int slop() { - return this.slop; - } - /** Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO". 
*/ public MatchQueryBuilder fuzziness(Object fuzziness) { this.fuzziness = Fuzziness.build(fuzziness); @@ -425,18 +372,10 @@ public void doXContent(XContentBuilder builder, Params params) throws IOExceptio builder.startObject(fieldName); builder.field(QUERY_FIELD.getPreferredName(), value); - // this is deprecated so only output the value if its not the default value (for bwc) - if (type != MatchQuery.Type.BOOLEAN) { - builder.field(TYPE_FIELD.getPreferredName(), type.toString().toLowerCase(Locale.ENGLISH)); - } builder.field(OPERATOR_FIELD.getPreferredName(), operator.toString()); if (analyzer != null) { builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); } - // this is deprecated so only output the value if its not the default value (for bwc) - if (slop != MatchQuery.DEFAULT_PHRASE_SLOP) { - builder.field(SLOP_FIELD.getPreferredName(), slop); - } if (fuzziness != null) { fuzziness.toXContent(builder, params); } @@ -473,7 +412,6 @@ protected Query doToQuery(QueryShardContext context) throws IOException { if (analyzer != null) { matchQuery.setAnalyzer(analyzer); } - matchQuery.setPhraseSlop(slop); matchQuery.setFuzziness(fuzziness); matchQuery.setFuzzyPrefixLength(prefixLength); matchQuery.setMaxExpansions(maxExpansions); @@ -484,7 +422,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { matchQuery.setZeroTermsQuery(zeroTermsQuery); matchQuery.setAutoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); - Query query = matchQuery.parse(type, fieldName, value); + Query query = matchQuery.parse(MatchQuery.Type.BOOLEAN, fieldName, value); return Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch); } @@ -492,10 +430,8 @@ protected Query doToQuery(QueryShardContext context) throws IOException { protected boolean doEquals(MatchQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(value, other.value) && - Objects.equals(type, other.type) && Objects.equals(operator, other.operator) 
&& Objects.equals(analyzer, other.analyzer) && - Objects.equals(slop, other.slop) && Objects.equals(fuzziness, other.fuzziness) && Objects.equals(prefixLength, other.prefixLength) && Objects.equals(maxExpansions, other.maxExpansions) && @@ -510,7 +446,7 @@ protected boolean doEquals(MatchQueryBuilder other) { @Override protected int doHashCode() { - return Objects.hash(fieldName, value, type, operator, analyzer, slop, + return Objects.hash(fieldName, value, operator, analyzer, fuzziness, prefixLength, maxExpansions, minimumShouldMatch, fuzzyRewrite, lenient, fuzzyTranspositions, zeroTermsQuery, cutoffFrequency, autoGenerateSynonymsPhraseQuery); } @@ -522,13 +458,11 @@ public String getWriteableName() { public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOException { String fieldName = null; - MatchQuery.Type type = MatchQuery.Type.BOOLEAN; Object value = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String minimumShouldMatch = null; String analyzer = null; Operator operator = MatchQueryBuilder.DEFAULT_OPERATOR; - int slop = MatchQuery.DEFAULT_PHRASE_SLOP; Fuzziness fuzziness = null; int prefixLength = FuzzyQuery.defaultPrefixLength; int maxExpansion = FuzzyQuery.defaultMaxExpansions; @@ -553,23 +487,10 @@ public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOExc } else if (token.isValue()) { if (QUERY_FIELD.match(currentFieldName)) { value = parser.objectText(); - } else if (TYPE_FIELD.match(currentFieldName)) { - String tStr = parser.text(); - if ("boolean".equals(tStr)) { - type = MatchQuery.Type.BOOLEAN; - } else if ("phrase".equals(tStr)) { - type = MatchQuery.Type.PHRASE; - } else if ("phrase_prefix".equals(tStr) || ("phrasePrefix".equals(tStr))) { - type = MatchQuery.Type.PHRASE_PREFIX; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query does not support type " + tStr); - } } else if (ANALYZER_FIELD.match(currentFieldName)) { analyzer = parser.text(); } else if 
(AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) { boost = parser.floatValue(); - } else if (SLOP_FIELD.match(currentFieldName)) { - slop = parser.intValue(); } else if (Fuzziness.FIELD.match(currentFieldName)) { fuzziness = Fuzziness.parse(parser); } else if (PREFIX_LENGTH_FIELD.match(currentFieldName)) { @@ -624,9 +545,7 @@ public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOExc MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value); matchQuery.operator(operator); - matchQuery.type(type); matchQuery.analyzer(analyzer); - matchQuery.slop(slop); matchQuery.minimumShouldMatch(minimumShouldMatch); if (fuzziness != null) { matchQuery.fuzziness(fuzziness); diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 680382917bf76..9d5ee3e7f76f3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -24,10 +24,8 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -148,23 +146,6 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, return; } - switch (queryBuilder.type()) { - case BOOLEAN: - assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(MatchNoDocsQuery.class)) - 
.or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class))); - break; - case PHRASE: - assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)) - .or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class))); - break; - case PHRASE_PREFIX: - assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)) - .or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class))); - break; - } QueryShardContext context = searchContext.getQueryShardContext(); MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName()); if (query instanceof TermQuery && fieldType != null) { @@ -250,11 +231,6 @@ public void testIllegalValues() { assertEquals("[match] requires operator to be non-null", e.getMessage()); } - { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.type(null)); - assertEquals("[match] requires type to be non-null", e.getMessage()); - } - { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.zeroTermsQuery(null)); assertEquals("[match] requires zeroTermsQuery to be non-null", e.getMessage()); @@ -290,69 +266,6 @@ public void testSimpleMatchQuery() throws IOException { assertEquals(json, Operator.AND, qb.operator()); } - public void testLegacyMatchPhrasePrefixQuery() throws IOException { - MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be"); - expectedQB.type(Type.PHRASE_PREFIX); - expectedQB.slop(2); - expectedQB.maxExpansions(30); - String json = "{\n" + - " \"match\" : {\n" + - " \"message\" : {\n" + - " \"query\" : \"to be or not to be\",\n" + - " \"type\" : \"phrase_prefix\",\n" + - " \"operator\" : \"OR\",\n" + - " \"slop\" : 2,\n" + - " \"prefix_length\" : 0,\n" + - " 
\"max_expansions\" : 30,\n" + - " \"fuzzy_transpositions\" : true,\n" + - " \"lenient\" : false,\n" + - " \"zero_terms_query\" : \"NONE\",\n" + - " \"auto_generate_synonyms_phrase_query\" : true,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - " }\n" + - "}"; - MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json); - checkGeneratedJson(json, qb); - - assertEquals(json, expectedQB, qb); - - assertSerialization(qb); - - assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]", - "Deprecated field [slop] used, replaced by [match_phrase query]"); - } - - public void testLegacyMatchPhraseQuery() throws IOException { - MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be"); - expectedQB.type(Type.PHRASE); - expectedQB.slop(2); - String json = "{\n" + - " \"match\" : {\n" + - " \"message\" : {\n" + - " \"query\" : \"to be or not to be\",\n" + - " \"type\" : \"phrase\",\n" + - " \"operator\" : \"OR\",\n" + - " \"slop\" : 2,\n" + - " \"prefix_length\" : 0,\n" + - " \"max_expansions\" : 50,\n" + - " \"fuzzy_transpositions\" : true,\n" + - " \"lenient\" : false,\n" + - " \"zero_terms_query\" : \"NONE\",\n" + - " \"auto_generate_synonyms_phrase_query\" : true,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - " }\n" + - "}"; - MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json); - checkGeneratedJson(json, qb); - - assertEquals(json, expectedQB, qb); - assertSerialization(qb); - assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]", - "Deprecated field [slop] used, replaced by [match_phrase query]"); - } - public void testFuzzinessOnNonStringField() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java 
b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 22803259faa05..1d752aab17545 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -39,7 +40,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; -import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -79,7 +79,6 @@ import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.typeQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; @@ -1474,7 +1473,7 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) + .setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field(new HighlightBuilder.Field("tags") 
.highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("simple"))) @@ -1485,7 +1484,7 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { equalTo("here is another one that is very long tag and has the tag token near the end")); response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) + .setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field(new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2) .fragmenter("span"))).get(); @@ -1496,7 +1495,7 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { equalTo("here is another one that is very long tag and has the tag token near the end")); assertFailures(client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) + .setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field(new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2) .fragmenter("invalid"))), @@ -1554,7 +1553,7 @@ public void testMissingStoredField() throws Exception { // This query used to fail when the field to highlight was absent SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQuery.Type.BOOLEAN)) + .setQuery(QueryBuilders.matchQuery("field", "highlight")) .highlighter( new HighlightBuilder().field(new HighlightBuilder.Field("highlight_field").fragmentSize(-1).numOfFragments(1) .fragmenter("simple"))).get(); @@ -1579,7 +1578,7 @@ public void testNumericHighlighting() throws Exception { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN)) + .setQuery(QueryBuilders.matchQuery("text", "test")) .highlighter( new 
HighlightBuilder().field("text").field("byte").field("short").field("int").field("long").field("float") .field("double")) @@ -1604,7 +1603,7 @@ public void testResetTwice() throws Exception { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN)) + .setQuery(QueryBuilders.matchQuery("text", "test")) .highlighter(new HighlightBuilder().field("text")).execute().actionGet(); // PatternAnalyzer will throw an exception if it is resetted twice assertHitCount(response, 1L); diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index ba7a13cf0102e..37ffda5f46a0f 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -179,7 +179,7 @@ private XContentBuilder createMapping() throws IOException { } public void testDefaults() throws ExecutionException, InterruptedException { - MatchQuery.Type type = randomBoolean() ? 
MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; + MatchQuery.Type type = MatchQuery.Type.BOOLEAN; SearchResponse searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") .operator(Operator.OR))).get(); @@ -270,9 +270,7 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio .addSort("_id", SortOrder.ASC) .setQuery(multiMatchQueryBuilder).get(); MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString()); - if (multiMatchQueryBuilder.getType() != null) { - matchQueryBuilder.type(MatchQuery.Type.valueOf(multiMatchQueryBuilder.getType().matchQueryType().toString())); - } + SearchResponse matchResp = client().prepareSearch("test") // _id tie sort .addSort("_score", SortOrder.DESC) @@ -294,7 +292,7 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio public void testCutoffFreq() throws ExecutionException, InterruptedException { final long numDocs = client().prepareSearch("test").setSize(0) .setQuery(matchAllQuery()).get().getHits().getTotalHits(); - MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; + MatchQuery.Type type = MatchQuery.Type.BOOLEAN; Float cutoffFrequency = randomBoolean() ? Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20); SearchResponse searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") @@ -357,7 +355,7 @@ public void testEquivalence() { int numIters = scaledRandomIntBetween(5, 10); for (int i = 0; i < numIters; i++) { { - MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; + MatchQuery.Type type = MatchQuery.Type.BOOLEAN; MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? 
multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("marvel hero captain america", "*_name", randomBoolean() ? "category" : "categ*"); SearchResponse left = client().prepareSearch("test").setSize(numDocs) @@ -377,7 +375,7 @@ public void testEquivalence() { } { - MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; + MatchQuery.Type type = MatchQuery.Type.BOOLEAN; String minShouldMatch = randomBoolean() ? null : "" + between(0, 1); Operator op = randomBoolean() ? Operator.AND : Operator.OR; MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") : diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index fb2fbd337dd2b..cded514b7b35d 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -35,12 +35,10 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.WrapperQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.index.search.MatchQuery; -import org.elasticsearch.index.search.MatchQuery.Type; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; @@ -70,6 +68,8 @@ import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; import static org.elasticsearch.index.query.QueryBuilders.idsQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; 
+import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; @@ -99,7 +99,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; -import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -173,10 +172,10 @@ public void testIndexOptions() throws Exception { client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox")); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(matchPhraseQuery("field2", "quick brown").slop(0)).get(); assertHitCount(searchResponse, 1L); - assertFailures(client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)), + assertFailures(client().prepareSearch().setQuery(matchPhraseQuery("field1", "quick brown").slop(0)), RestStatus.BAD_REQUEST, containsString("field:[field1] was indexed without position data; cannot run PhraseQuery")); } @@ -1407,7 +1406,7 @@ public void testMustNot() throws IOException, ExecutionException, InterruptedExc searchResponse = client().prepareSearch("test").setQuery( boolQuery() - .mustNot(matchQuery("description", 
"anything").type(Type.BOOLEAN)) + .mustNot(matchQuery("description", "anything")) ).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get(); assertHitCount(searchResponse, 2L); } @@ -1853,13 +1852,14 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE client().prepareIndex("test1", "type1", "2").setSource("field", "trying out Elasticsearch")); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field", "Johnnie la").slop(between(2,5)).type(Type.PHRASE_PREFIX)).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(matchPhrasePrefixQuery("field", "Johnnie la").slop(between(2, 5))) + .get(); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "1"); - searchResponse = client().prepareSearch().setQuery(matchQuery("field", "trying").type(Type.PHRASE_PREFIX)).get(); + searchResponse = client().prepareSearch().setQuery(matchPhrasePrefixQuery("field", "trying")).get(); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "2"); - searchResponse = client().prepareSearch().setQuery(matchQuery("field", "try").type(Type.PHRASE_PREFIX)).get(); + searchResponse = client().prepareSearch().setQuery(matchPhrasePrefixQuery("field", "try")).get(); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "2"); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 39657bc177736..69739ff2cb8ef 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchRequest; -import 
org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -283,8 +282,10 @@ public void testIndexedTemplateOverwrite() throws Exception { for (int i = 1; i < iterations; i++) { assertAcked(client().admin().cluster().preparePutStoredScript() .setId("git01") - .setContent(new BytesArray("{\"template\":{\"query\": {\"match\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + - "\"type\": \"ooophrase_prefix\"}}}}}"), XContentType.JSON)); + .setContent(new BytesArray( + "{\"template\":{\"query\": {\"match_phrase_prefix\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + + "\"slop\": -1}}}}}"), + XContentType.JSON)); GetStoredScriptResponse getResponse = client().admin().cluster().prepareGetStoredScript("git01").get(); assertNotNull(getResponse.getSource()); @@ -292,24 +293,22 @@ public void testIndexedTemplateOverwrite() throws Exception { Map templateParams = new HashMap<>(); templateParams.put("P_Keyword1", "dev"); - ParsingException e = expectThrows(ParsingException.class, () -> new SearchTemplateRequestBuilder(client()) + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SearchTemplateRequestBuilder(client()) .setRequest(new SearchRequest("testindex").types("test")) .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) .get()); - assertThat(e.getMessage(), containsString("[match] query does not support type ooophrase_prefix")); - assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]"); + assertThat(e.getMessage(), containsString("No negative slop allowed")); assertAcked(client().admin().cluster().preparePutStoredScript() .setId("git01") - .setContent(new BytesArray("{\"query\": {\"match\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + - "\"type\": \"phrase_prefix\"}}}}"), XContentType.JSON)); 
+ .setContent(new BytesArray("{\"query\": {\"match_phrase_prefix\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + + "\"slop\": 0}}}}"), XContentType.JSON)); SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) .setRequest(new SearchRequest("testindex").types("test")) .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) .get(); assertHitCount(searchResponse.getResponse(), 1); - assertWarnings("Deprecated field [type] used, replaced by [match_phrase and match_phrase_prefix query]"); } } From 3d67915ed5869148725f5b99412d525da305f208 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 20 Sep 2017 16:14:11 +0200 Subject: [PATCH 08/13] #26720: Set the correct bwc version after backport to 6.0 --- .../elasticsearch/index/query/MatchQueryBuilder.java | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 1b126edbd124e..cc19603ea64d8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -110,13 +110,11 @@ public MatchQueryBuilder(StreamInput in) throws IOException { super(in); fieldName = in.readString(); value = in.readGenericValue(); - // TODO lower this version once this has been backported to 6.0.0 - if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + if (in.getVersion().before(Version.V_6_0_0_rc1)) { MatchQuery.Type.readFromStream(in); // deprecated type } operator = Operator.readFromStream(in); - // TODO lower this version once this has been backported to 6.0.0 - if (in.getVersion().before(Version.V_7_0_0_alpha1)) { + if (in.getVersion().before(Version.V_6_0_0_rc1)) { in.readVInt(); // deprecated slop } prefixLength = in.readVInt(); @@ -139,13 +137,11 @@ public 
MatchQueryBuilder(StreamInput in) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeGenericValue(value); - // TODO lower this version once this has been backported to 6.0.0 - if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + if (out.getVersion().before(Version.V_6_0_0_rc1)) { MatchQuery.Type.BOOLEAN.writeTo(out); // deprecated type } operator.writeTo(out); - // TODO lower this version once this has been backported to 6.0.0 - if (out.getVersion().before(Version.V_7_0_0_alpha1)) { + if (out.getVersion().before(Version.V_6_0_0_rc1)) { out.writeVInt(MatchQuery.DEFAULT_PHRASE_SLOP); // deprecated slop } out.writeVInt(prefixLength); From 86b00b84bc327af9ace1be70a80727a9ea8c41a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 20 Sep 2017 16:22:21 +0200 Subject: [PATCH 09/13] Remove parse field deprecations in query builders (#26711) The `fielddata` field and the use of the `_name` field in the short syntax of the range query have been deprecated in 5.0 and can be removed. The same goes for the deprecated `score_mode` field in HasParentQueryBuilder, the deprecated `like_text`, `ids` and `docs` parameter in the `more_like_this` query, the deprecated query name in the short version of the `regexp` query, and several deprecated alternative field names in other query builders. 
--- .../query/ConstantScoreQueryBuilder.java | 2 +- .../index/query/MatchPhraseQueryBuilder.java | 2 +- .../index/query/MoreLikeThisQueryBuilder.java | 23 ++----------------- .../index/query/MultiMatchQueryBuilder.java | 2 +- .../index/query/PrefixQueryBuilder.java | 2 +- .../index/query/RangeQueryBuilder.java | 13 ++--------- .../index/query/RegexpQueryBuilder.java | 12 +++------- .../index/query/support/QueryParsers.java | 2 +- .../index/query/RangeQueryBuilderTests.java | 15 ------------ .../query-dsl/multi-term-rewrite.asciidoc | 2 +- .../reference/query-dsl/prefix-query.asciidoc | 13 ----------- .../join/query/HasChildQueryBuilder.java | 4 ++-- .../join/query/HasParentQueryBuilder.java | 15 ++---------- .../join/query/ParentIdQueryBuilder.java | 2 +- .../query/HasParentQueryBuilderTests.java | 17 -------------- .../LegacyHasParentQueryBuilderTests.java | 17 -------------- .../elasticsearch/bwc/QueryBuilderBWCIT.java | 2 +- 17 files changed, 19 insertions(+), 126 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java index f1e3f7aea7780..df4e31c5955a9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java @@ -39,7 +39,7 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "constant_score"; - private static final ParseField INNER_QUERY_FIELD = new ParseField("filter", "query"); + private static final ParseField INNER_QUERY_FIELD = new ParseField("filter"); private final QueryBuilder filterBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java index 1865e30744302..1bdab8d78a81d 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java @@ -38,7 +38,7 @@ */ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "match_phrase"; - public static final ParseField SLOP_FIELD = new ParseField("slop", "phrase_slop"); + public static final ParseField SLOP_FIELD = new ParseField("slop"); private final String fieldName; diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index b4c2b80fab567..34411d669ec3b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -96,15 +96,12 @@ private interface Field { ParseField FIELDS = new ParseField("fields"); ParseField LIKE = new ParseField("like"); ParseField UNLIKE = new ParseField("unlike"); - ParseField LIKE_TEXT = new ParseField("like_text").withAllDeprecated("like"); - ParseField IDS = new ParseField("ids").withAllDeprecated("like"); - ParseField DOCS = new ParseField("docs").withAllDeprecated("like"); ParseField MAX_QUERY_TERMS = new ParseField("max_query_terms"); ParseField MIN_TERM_FREQ = new ParseField("min_term_freq"); ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq"); ParseField MAX_DOC_FREQ = new ParseField("max_doc_freq"); - ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len"); - ParseField MAX_WORD_LENGTH = new ParseField("max_word_length", "max_word_len"); + ParseField MIN_WORD_LENGTH = new ParseField("min_word_length"); + ParseField MAX_WORD_LENGTH = new ParseField("max_word_length"); ParseField STOP_WORDS = new ParseField("stop_words"); ParseField ANALYZER = new ParseField("analyzer"); ParseField MINIMUM_SHOULD_MATCH = new ParseField("minimum_should_match"); @@ -846,8 
+843,6 @@ public static MoreLikeThisQueryBuilder fromXContent(XContentParser parser) throw parseLikeField(parser, likeTexts, likeItems); } else if (Field.UNLIKE.match(currentFieldName)) { parseLikeField(parser, unlikeTexts, unlikeItems); - } else if (Field.LIKE_TEXT.match(currentFieldName)) { - likeTexts.add(parser.text()); } else if (Field.MAX_QUERY_TERMS.match(currentFieldName)) { maxQueryTerms = parser.intValue(); } else if (Field.MIN_TERM_FREQ.match(currentFieldName)) { @@ -891,20 +886,6 @@ public static MoreLikeThisQueryBuilder fromXContent(XContentParser parser) throw while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { parseLikeField(parser, unlikeTexts, unlikeItems); } - } else if (Field.IDS.match(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (!token.isValue()) { - throw new IllegalArgumentException("ids array element should only contain ids"); - } - likeItems.add(new Item(null, null, parser.text())); - } - } else if (Field.DOCS.match(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("docs array element should include an object"); - } - likeItems.add(Item.parse(parser, new Item())); - } } else if (Field.STOP_WORDS.match(currentFieldName)) { stopWords = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index ac2c9b559e86d..b439d53e6921a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -58,7 +58,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { public static final 
String NAME = "prefix"; - private static final ParseField PREFIX_FIELD = new ParseField("value", "prefix"); + private static final ParseField PREFIX_FIELD = new ParseField("value"); private static final ParseField REWRITE_FIELD = new ParseField("rewrite"); private final String fieldName; diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 35350a8480ec7..14f1e16f39cbf 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -53,11 +53,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i public static final boolean DEFAULT_INCLUDE_UPPER = true; public static final boolean DEFAULT_INCLUDE_LOWER = true; - private static final ParseField FIELDDATA_FIELD = new ParseField("fielddata").withAllDeprecated("[no replacement]"); - private static final ParseField NAME_FIELD = new ParseField("_name") - .withAllDeprecated("query name is not supported in short version of range query"); - public static final ParseField LTE_FIELD = new ParseField("lte", "le"); - public static final ParseField GTE_FIELD = new ParseField("gte", "ge"); + public static final ParseField LTE_FIELD = new ParseField("lte"); + public static final ParseField GTE_FIELD = new ParseField("gte"); public static final ParseField FROM_FIELD = new ParseField("from"); public static final ParseField TO_FIELD = new ParseField("to"); private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower"); @@ -416,13 +413,7 @@ public static RangeQueryBuilder fromXContent(XContentParser parser) throws IOExc } } } else if (token.isValue()) { - if (NAME_FIELD.match(currentFieldName)) { - queryName = parser.text(); - } else if (FIELDDATA_FIELD.match(currentFieldName)) { - // ignore - } else { throw new ParsingException(parser.getTokenLocation(), "[range] query does not support [" + 
currentFieldName + "]"); - } } } diff --git a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index e6ae1b7d63a93..96290d9291259 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -47,8 +47,6 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value(); public static final int DEFAULT_MAX_DETERMINIZED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES; - private static final ParseField NAME_FIELD = new ParseField("_name") - .withAllDeprecated("query name is not supported in short version of regexp query"); private static final ParseField FLAGS_VALUE_FIELD = new ParseField("flags_value"); private static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); private static final ParseField FLAGS_FIELD = new ParseField("flags"); @@ -219,13 +217,9 @@ public static RegexpQueryBuilder fromXContent(XContentParser parser) throws IOEx } } } else { - if (NAME_FIELD.match(currentFieldName)) { - queryName = parser.text(); - } else { - throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, parser.currentName()); - fieldName = currentFieldName; - value = parser.textOrNull(); - } + throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, parser.currentName()); + fieldName = currentFieldName; + value = parser.textOrNull(); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java b/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java index 62750d275b1c6..036efa75bdaa3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java @@ -25,7 +25,7 @@ 
public final class QueryParsers { - public static final ParseField CONSTANT_SCORE = new ParseField("constant_score", "constant_score_auto", "constant_score_filter"); + public static final ParseField CONSTANT_SCORE = new ParseField("constant_score"); public static final ParseField SCORING_BOOLEAN = new ParseField("scoring_boolean"); public static final ParseField CONSTANT_SCORE_BOOLEAN = new ParseField("constant_score_boolean"); public static final ParseField TOP_TERMS = new ParseField("top_terms_"); diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 67c3e67d39e84..4c0e2192cecb7 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -382,21 +382,6 @@ public void testNamedQueryParsing() throws IOException { " }\n" + "}"; assertNotNull(parseQuery(json)); - - final String deprecatedJson = - "{\n" + - " \"range\" : {\n" + - " \"timestamp\" : {\n" + - " \"from\" : \"2015-01-01 00:00:00\",\n" + - " \"to\" : \"now\",\n" + - " \"boost\" : 1.0\n" + - " },\n" + - " \"_name\" : \"my_range\"\n" + - " }\n" + - "}"; - - assertNotNull(parseQuery(deprecatedJson)); - assertWarnings("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]"); } public void testRewriteDateToMatchAll() throws IOException { diff --git a/docs/reference/query-dsl/multi-term-rewrite.asciidoc b/docs/reference/query-dsl/multi-term-rewrite.asciidoc index 27f31340a1957..0d327a40fdea3 100644 --- a/docs/reference/query-dsl/multi-term-rewrite.asciidoc +++ b/docs/reference/query-dsl/multi-term-rewrite.asciidoc @@ -19,7 +19,7 @@ boost. into a should clause in a boolean query, and keeps the scores as computed by the query. 
Note that typically such scores are meaningless to the user, and require non-trivial CPU to compute, so it's almost -always better to use `constant_score_auto`. This rewrite method will hit +always better to use `constant_score`. This rewrite method will hit too many clauses failure if it exceeds the boolean query limit (defaults to `1024`). * `constant_score_boolean`: Similar to `scoring_boolean` except scores diff --git a/docs/reference/query-dsl/prefix-query.asciidoc b/docs/reference/query-dsl/prefix-query.asciidoc index 270fc925f0c50..54d69583e990c 100644 --- a/docs/reference/query-dsl/prefix-query.asciidoc +++ b/docs/reference/query-dsl/prefix-query.asciidoc @@ -28,19 +28,6 @@ GET /_search -------------------------------------------------- // CONSOLE -Or with the `prefix` deprecated[5.0.0, Use `value`] syntax: - -[source,js] --------------------------------------------------- -GET /_search -{ "query": { - "prefix" : { "user" : { "prefix" : "ki", "boost" : 2.0 } } - } -} --------------------------------------------------- -// CONSOLE -// TEST[warning:Deprecated field [prefix] used, expected [value] instead] - This multi term query allows you to control how it gets rewritten using the <> parameter. diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index ee362b9bca78a..bece1751a46c3 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -76,8 +76,8 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder Date: Wed, 20 Sep 2017 21:07:19 +0600 Subject: [PATCH 10/13] [Docs] Fix name of character filter in example. 
(#26724) --- docs/reference/analysis/analyzers/custom-analyzer.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index f14759856dd05..34572acaa9650 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -202,7 +202,7 @@ POST my_index/_analyze -------------------------------------------------- // CONSOLE -<1> The `emoticon` character filter, `punctuation` tokenizer and +<1> The `emoticons` character filter, `punctuation` tokenizer and `english_stop` token filter are custom implementations which are defined in the same index settings. From b9c0d4447c92360d136761d34a50d9aa90caaed8 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 20 Sep 2017 17:53:12 +0200 Subject: [PATCH 11/13] Catch exceptions and inform handler in RemoteClusterConnection#collectNodes (#26725) This adds a missing catch block to invoke the action listener instead of bubbling up the exception. Closes #26700 --- .../transport/RemoteClusterConnection.java | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 39fb515984f7a..c08bf9b737e95 100644 --- a/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -230,15 +230,19 @@ public String executor() { } }); }; - if (connectedNodes.size() == 0) { - // just in case if we are not connected for some reason we try to connect and if we fail we have to notify the listener - // this will cause some back pressure on the search end and eventually will cause rejections but that's fine - // we can't proceed with a search on a cluster level.
- // in the future we might want to just skip the remote nodes in such a case but that can already be implemented on the - // caller end since they provide the listener. - ensureConnected(ActionListener.wrap((x) -> runnable.run(), listener::onFailure)); - } else { - runnable.run(); + try { + if (connectedNodes.size() == 0) { + // just in case if we are not connected for some reason we try to connect and if we fail we have to notify the listener + // this will cause some back pressure on the search end and eventually will cause rejections but that's fine + // we can't proceed with a search on a cluster level. + // in the future we might want to just skip the remote nodes in such a case but that can already be implemented on the + // caller end since they provide the listener. + ensureConnected(ActionListener.wrap((x) -> runnable.run(), listener::onFailure)); + } else { + runnable.run(); + } + } catch (Exception ex) { + listener.onFailure(ex); } } From c47f24d4061f51c8e831d030443afa90d73f681c Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Wed, 20 Sep 2017 10:19:42 -0600 Subject: [PATCH 12/13] BulkProcessor flush runnable preserves the thread context from creation time (#26718) When using a bulk processor, the thread context was not preserved for the flush runnable which is executed in another thread in the thread pool. This change wraps the flush runnable in a context preserving runnable so that the headers and transients from the creation time of the bulk processor are available during the execution of the flush. 
Closes #26596 --- .../action/bulk/BulkProcessor.java | 3 +- .../action/bulk/BulkProcessorTests.java | 100 ++++++++++++++++++ 2 files changed, 102 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 3269fbc95008f..a1aae5f860278 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -302,7 +302,8 @@ public boolean isCancelled() { }; } - return threadPool.scheduleWithFixedDelay(new Flush(), flushInterval, ThreadPool.Names.GENERIC); + final Runnable flushRunnable = threadPool.getThreadContext().preserveContext(new Flush()); + return threadPool.scheduleWithFixedDelay(flushRunnable, flushInterval, ThreadPool.Names.GENERIC); } private void executeIfNeeded() { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java new file mode 100644 index 0000000000000..9b37a2b9f4c48 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorTests.java @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.concurrent.CountDownLatch; +import java.util.function.BiConsumer; + +public class BulkProcessorTests extends ESTestCase { + + private ThreadPool threadPool; + + @Before + public void startThreadPool() { + threadPool = new TestThreadPool("BulkProcessorTests"); + } + + @After + public void stopThreadPool() throws InterruptedException { + terminate(threadPool); + } + + public void testBulkProcessorFlushPreservesContext() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + final String headerKey = randomAlphaOfLengthBetween(1, 8); + final String transientKey = randomAlphaOfLengthBetween(1, 8); + final String headerValue = randomAlphaOfLengthBetween(1, 32); + final Object transientValue = new Object(); + + BiConsumer> consumer = (request, listener) -> { + ThreadContext threadContext = threadPool.getThreadContext(); + assertEquals(headerValue, threadContext.getHeader(headerKey)); + assertSame(transientValue, 
threadContext.getTransient(transientKey)); + latch.countDown(); + }; + + final int bulkSize = randomIntBetween(2, 32); + final TimeValue flushInterval = TimeValue.timeValueSeconds(1L); + final BulkProcessor bulkProcessor; + assertNull(threadPool.getThreadContext().getHeader(headerKey)); + assertNull(threadPool.getThreadContext().getTransient(transientKey)); + try (ThreadContext.StoredContext ctx = threadPool.getThreadContext().stashContext()) { + threadPool.getThreadContext().putHeader(headerKey, headerValue); + threadPool.getThreadContext().putTransient(transientKey, transientValue); + bulkProcessor = new BulkProcessor(consumer, BackoffPolicy.noBackoff(), new BulkProcessor.Listener() { + @Override + public void beforeBulk(long executionId, BulkRequest request) { + } + + @Override + public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + } + + @Override + public void afterBulk(long executionId, BulkRequest request, Throwable failure) { + } + }, 1, bulkSize, new ByteSizeValue(5, ByteSizeUnit.MB), flushInterval, threadPool); + } + assertNull(threadPool.getThreadContext().getHeader(headerKey)); + assertNull(threadPool.getThreadContext().getTransient(transientKey)); + + // add a single item which won't be over the size or number of items + bulkProcessor.add(new IndexRequest()); + + // wait for flush to execute + latch.await(); + + assertNull(threadPool.getThreadContext().getHeader(headerKey)); + assertNull(threadPool.getThreadContext().getTransient(transientKey)); + bulkProcessor.close(); + } +} From 06551a854932a569b451cbca012713f6f5d86671 Mon Sep 17 00:00:00 2001 From: lcawley Date: Wed, 20 Sep 2017 10:54:26 -0700 Subject: [PATCH 13/13] [DOCS] Added index-shared4 and index-shared5.asciidoc --- docs/reference/index-shared2.asciidoc | 26 -------------------------- docs/reference/index-shared3.asciidoc | 26 +++++++++++++++++++++----- docs/reference/index-shared4.asciidoc | 8 ++++++++ docs/reference/index-shared5.asciidoc | 2 ++ 
docs/reference/index.asciidoc | 2 ++ 5 files changed, 33 insertions(+), 31 deletions(-) create mode 100644 docs/reference/index-shared4.asciidoc create mode 100644 docs/reference/index-shared5.asciidoc diff --git a/docs/reference/index-shared2.asciidoc b/docs/reference/index-shared2.asciidoc index 0a0e3aaf57d2d..e48948079cc9f 100644 --- a/docs/reference/index-shared2.asciidoc +++ b/docs/reference/index-shared2.asciidoc @@ -1,28 +1,2 @@ include::migration/index.asciidoc[] - -include::api-conventions.asciidoc[] - -include::docs.asciidoc[] - -include::search.asciidoc[] - -include::aggregations.asciidoc[] - -include::indices.asciidoc[] - -include::cat.asciidoc[] - -include::cluster.asciidoc[] - -include::query-dsl.asciidoc[] - -include::mapping.asciidoc[] - -include::analysis.asciidoc[] - -include::modules.asciidoc[] - -include::index-modules.asciidoc[] - -include::ingest.asciidoc[] diff --git a/docs/reference/index-shared3.asciidoc b/docs/reference/index-shared3.asciidoc index cf685c15253f2..4da338186b0c8 100644 --- a/docs/reference/index-shared3.asciidoc +++ b/docs/reference/index-shared3.asciidoc @@ -1,10 +1,26 @@ -include::how-to.asciidoc[] +include::api-conventions.asciidoc[] -include::testing.asciidoc[] +include::docs.asciidoc[] -include::glossary.asciidoc[] +include::search.asciidoc[] -include::release-notes.asciidoc[] +include::aggregations.asciidoc[] -include::redirects.asciidoc[] +include::indices.asciidoc[] + +include::cat.asciidoc[] + +include::cluster.asciidoc[] + +include::query-dsl.asciidoc[] + +include::mapping.asciidoc[] + +include::analysis.asciidoc[] + +include::modules.asciidoc[] + +include::index-modules.asciidoc[] + +include::ingest.asciidoc[] diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc new file mode 100644 index 0000000000000..3d807dd98d39c --- /dev/null +++ b/docs/reference/index-shared4.asciidoc @@ -0,0 +1,8 @@ + +include::how-to.asciidoc[] + +include::testing.asciidoc[] + 
+include::glossary.asciidoc[] + +include::release-notes.asciidoc[] diff --git a/docs/reference/index-shared5.asciidoc b/docs/reference/index-shared5.asciidoc new file mode 100644 index 0000000000000..572522f6c8e74 --- /dev/null +++ b/docs/reference/index-shared5.asciidoc @@ -0,0 +1,2 @@ + +include::redirects.asciidoc[] diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 5fa6e1d914d6a..8aa9eef32f8bf 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -8,3 +8,5 @@ include::../Versions.asciidoc[] include::index-shared1.asciidoc[] include::index-shared2.asciidoc[] include::index-shared3.asciidoc[] +include::index-shared4.asciidoc[] +include::index-shared5.asciidoc[]