From 44c653f5a87337a02daf4737bcb34150df509dbe Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 8 Jun 2016 15:40:46 +0200 Subject: [PATCH] Upgrade to lucene-6.1.0-snapshot-3a57bea. --- .../resources/forbidden/es-all-signatures.txt | 2 - buildSrc/version.properties | 2 +- .../lucene/document/XInetAddressPoint.java | 117 ---------- .../lucene/queries/BlendedTermQuery.java | 4 +- .../apache/lucene/queries/MinDocQuery.java | 4 +- .../analyzing/XAnalyzingSuggester.java | 19 +- .../main/java/org/elasticsearch/Version.java | 2 +- .../elasticsearch/bootstrap/JavaVersion.java | 20 ++ .../common/geo/GeoHashUtils.java | 10 +- .../elasticsearch/common/geo/GeoPoint.java | 13 +- .../elasticsearch/common/geo/GeoUtils.java | 12 +- .../common/lucene/all/AllTermQuery.java | 14 ++ .../lucene/search/MatchNoDocsQuery.java | 10 + .../lucene/search/MoreLikeThisQuery.java | 4 +- .../lucene/search/MultiPhrasePrefixQuery.java | 4 +- .../function/FiltersFunctionScoreQuery.java | 4 +- .../search/function/FunctionScoreQuery.java | 4 +- .../common/settings/ClusterSettings.java | 1 + .../fielddata/ordinals/OrdinalsBuilder.java | 3 +- .../plain/AbstractIndexGeoPointFieldData.java | 3 +- .../index/mapper/core/DateFieldMapper.java | 4 +- .../mapper/core/LegacyDateFieldMapper.java | 4 +- .../mapper/internal/TypeFieldMapper.java | 4 +- .../index/mapper/ip/IpFieldMapper.java | 15 +- .../query/GeoDistanceRangeQueryBuilder.java | 9 +- .../index/query/HasChildQueryBuilder.java | 4 +- .../index/query/ScriptQueryBuilder.java | 4 +- .../search/geo/GeoDistanceRangeQuery.java | 4 +- .../index/search/geo/GeoPolygonQuery.java | 4 +- .../geo/InMemoryGeoBoundingBoxQuery.java | 4 +- .../index/shard/StoreRecovery.java | 11 +- .../indices/IndicesQueryCache.java | 221 +++++++++--------- .../geocentroid/GeoCentroidAggregator.java | 18 +- .../geocentroid/InternalGeoCentroid.java | 11 +- .../fetch/innerhits/InnerHitsContext.java | 4 +- .../search/slice/SliceQuery.java | 4 +- .../search/slice/TermsSliceQuery.java | 
6 +- .../elasticsearch/bootstrap/security.policy | 7 +- .../bootstrap/test-framework.policy | 4 +- .../java/org/elasticsearch/VersionTests.java | 3 +- .../bootstrap/JavaVersionTests.java | 4 + .../AbstractGeoFieldDataTestCase.java | 6 +- .../SimpleExternalMappingTests.java | 8 +- .../mapper/geo/GeoPointFieldMapperTests.java | 42 ++-- .../geo/GeohashMappingGeoPointTests.java | 6 +- .../index/mapper/ip/IpFieldTypeTests.java | 13 +- .../query/GeoDistanceQueryBuilderTests.java | 4 +- .../query/GeoDistanceRangeQueryTests.java | 3 +- .../query/GeoPolygonQueryBuilderTests.java | 10 +- .../query/plugin/DummyQueryParserPlugin.java | 10 + .../index/shard/StoreRecoveryTests.java | 7 +- .../indices/IndicesQueryCacheTests.java | 8 +- .../indices/stats/IndexStatsIT.java | 2 + .../search/geo/GeoBoundingBoxIT.java | 2 + .../elasticsearch/search/geo/GeoFilterIT.java | 4 +- .../search/nested/SimpleNestedIT.java | 8 +- .../lucene-analyzers-common-6.0.1.jar.sha1 | 1 - ...ers-common-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-backward-codecs-6.0.1.jar.sha1 | 1 - ...ard-codecs-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-core-6.0.1.jar.sha1 | 1 - ...ucene-core-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-grouping-6.0.1.jar.sha1 | 1 - ...e-grouping-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-highlighter-6.0.1.jar.sha1 | 1 - ...ighlighter-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-join-6.0.1.jar.sha1 | 1 - ...ucene-join-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-memory-6.0.1.jar.sha1 | 1 - ...ene-memory-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-misc-6.0.1.jar.sha1 | 1 - ...ucene-misc-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-queries-6.0.1.jar.sha1 | 1 - ...ne-queries-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-queryparser-6.0.1.jar.sha1 | 1 - ...ueryparser-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-sandbox-6.0.1.jar.sha1 | 1 - ...ne-sandbox-6.1.0-snapshot-3a57bea.jar.sha1 
| 1 + .../licenses/lucene-spatial-6.0.1.jar.sha1 | 1 - ...ne-spatial-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-spatial-extras-6.0.1.jar.sha1 | 1 - ...ial-extras-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-spatial3d-6.0.1.jar.sha1 | 1 - ...-spatial3d-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../licenses/lucene-suggest-6.0.1.jar.sha1 | 1 - ...ne-suggest-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-expressions-6.0.1.jar.sha1 | 1 - ...xpressions-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../messy/tests/SimpleSortTests.java | 4 +- .../percolator/PercolateQuery.java | 4 +- .../percolator/PercolateQueryTests.java | 10 + .../lucene-analyzers-icu-6.0.1.jar.sha1 | 1 - ...lyzers-icu-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-analyzers-kuromoji-6.0.1.jar.sha1 | 1 - ...s-kuromoji-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-analyzers-phonetic-6.0.1.jar.sha1 | 1 - ...s-phonetic-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-analyzers-smartcn-6.0.1.jar.sha1 | 1 - ...rs-smartcn-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../lucene-analyzers-stempel-6.0.1.jar.sha1 | 1 - ...rs-stempel-6.1.0-snapshot-3a57bea.jar.sha1 | 1 + .../elasticsearch/test/ESIntegTestCase.java | 7 +- 102 files changed, 384 insertions(+), 417 deletions(-) delete mode 100644 core/src/main/java/org/apache/lucene/document/XInetAddressPoint.java delete mode 100644 distribution/licenses/lucene-analyzers-common-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-analyzers-common-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-backward-codecs-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-backward-codecs-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-core-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-core-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-grouping-6.0.1.jar.sha1 create mode 100644 
distribution/licenses/lucene-grouping-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-highlighter-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-highlighter-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-join-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-join-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-memory-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-memory-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-misc-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-misc-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-queries-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-queries-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-queryparser-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-queryparser-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-sandbox-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-sandbox-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial-extras-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial-extras-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial3d-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial3d-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 distribution/licenses/lucene-suggest-6.0.1.jar.sha1 create mode 100644 distribution/licenses/lucene-suggest-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-6.0.1.jar.sha1 create mode 100644 
modules/lang-expression/licenses/lucene-expressions-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.1.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.1.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.1.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.1.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0-snapshot-3a57bea.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.1.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0-snapshot-3a57bea.jar.sha1 diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt index 0e5ce884d9d24..e31a7020282eb 100644 --- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -31,5 +31,3 @@ org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey() @defaultMessage Soon to be removed org.apache.lucene.document.FieldType#numericType() - -org.apache.lucene.document.InetAddressPoint#newPrefixQuery(java.lang.String, java.net.InetAddress, int) @LUCENE-7232 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index d9e3908df22c2..f757eb8eef61e 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 5.0.0 -lucene = 6.0.1 +lucene = 6.1.0-snapshot-3a57bea # optional dependencies spatial4j = 0.6 
diff --git a/core/src/main/java/org/apache/lucene/document/XInetAddressPoint.java b/core/src/main/java/org/apache/lucene/document/XInetAddressPoint.java deleted file mode 100644 index 580b875ce2c9b..0000000000000 --- a/core/src/main/java/org/apache/lucene/document/XInetAddressPoint.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.document; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.Arrays; - -import org.apache.lucene.search.Query; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.common.SuppressForbidden; - -/** - * Forked utility methods from Lucene's InetAddressPoint until LUCENE-7232 and - * LUCENE-7234 are released. - */ -// TODO: remove me when we upgrade to Lucene 6.1 -@SuppressForbidden(reason="uses InetAddress.getHostAddress") -public final class XInetAddressPoint { - - private XInetAddressPoint() {} - - /** The minimum value that an ip address can hold. */ - public static final InetAddress MIN_VALUE; - /** The maximum value that an ip address can hold. 
*/ - public static final InetAddress MAX_VALUE; - static { - MIN_VALUE = InetAddressPoint.decode(new byte[InetAddressPoint.BYTES]); - byte[] maxValueBytes = new byte[InetAddressPoint.BYTES]; - Arrays.fill(maxValueBytes, (byte) 0xFF); - MAX_VALUE = InetAddressPoint.decode(maxValueBytes); - } - - /** - * Return the {@link InetAddress} that compares immediately greater than - * {@code address}. - * @throws ArithmeticException if the provided address is the - * {@link #MAX_VALUE maximum ip address} - */ - public static InetAddress nextUp(InetAddress address) { - if (address.equals(MAX_VALUE)) { - throw new ArithmeticException("Overflow: there is no greater InetAddress than " - + address.getHostAddress()); - } - byte[] delta = new byte[InetAddressPoint.BYTES]; - delta[InetAddressPoint.BYTES-1] = 1; - byte[] nextUpBytes = new byte[InetAddressPoint.BYTES]; - NumericUtils.add(InetAddressPoint.BYTES, 0, InetAddressPoint.encode(address), delta, nextUpBytes); - return InetAddressPoint.decode(nextUpBytes); - } - - /** - * Return the {@link InetAddress} that compares immediately less than - * {@code address}. - * @throws ArithmeticException if the provided address is the - * {@link #MIN_VALUE minimum ip address} - */ - public static InetAddress nextDown(InetAddress address) { - if (address.equals(MIN_VALUE)) { - throw new ArithmeticException("Underflow: there is no smaller InetAddress than " - + address.getHostAddress()); - } - byte[] delta = new byte[InetAddressPoint.BYTES]; - delta[InetAddressPoint.BYTES-1] = 1; - byte[] nextDownBytes = new byte[InetAddressPoint.BYTES]; - NumericUtils.subtract(InetAddressPoint.BYTES, 0, InetAddressPoint.encode(address), delta, nextDownBytes); - return InetAddressPoint.decode(nextDownBytes); - } - - /** - * Create a prefix query for matching a CIDR network range. - * - * @param field field name. must not be {@code null}. - * @param value any host address - * @param prefixLength the network prefix length for this address. 
This is also known as the subnet mask in the context of IPv4 - * addresses. - * @throws IllegalArgumentException if {@code field} is null, or prefixLength is invalid. - * @return a query matching documents with addresses contained within this network - */ - // TODO: remove me when we upgrade to Lucene 6.0.1 - public static Query newPrefixQuery(String field, InetAddress value, int prefixLength) { - if (value == null) { - throw new IllegalArgumentException("InetAddress must not be null"); - } - if (prefixLength < 0 || prefixLength > 8 * value.getAddress().length) { - throw new IllegalArgumentException("illegal prefixLength '" + prefixLength - + "'. Must be 0-32 for IPv4 ranges, 0-128 for IPv6 ranges"); - } - // create the lower value by zeroing out the host portion, upper value by filling it with all ones. - byte lower[] = value.getAddress(); - byte upper[] = value.getAddress(); - for (int i = prefixLength; i < 8 * lower.length; i++) { - int m = 1 << (7 - (i & 7)); - lower[i >> 3] &= ~m; - upper[i >> 3] |= m; - } - try { - return InetAddressPoint.newRangeQuery(field, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper)); - } catch (UnknownHostException e) { - throw new AssertionError(e); // values are coming from InetAddress - } - } -} diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 564f780b8ed74..a4b94b007fd28 100644 --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -283,7 +283,7 @@ private Term[] equalsTerms() { @Override public boolean equals(Object o) { if (this == o) return true; - if (!super.equals(o)) return false; + if (sameClassAs(o) == false) return false; BlendedTermQuery that = (BlendedTermQuery) o; return Arrays.equals(equalsTerms(), that.equalsTerms()); @@ -291,7 +291,7 @@ public boolean equals(Object o) { @Override public int hashCode() { 
- return Objects.hash(super.hashCode(), Arrays.hashCode(equalsTerms())); + return Objects.hash(classHash(), Arrays.hashCode(equalsTerms())); } public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final boolean disableCoord) { diff --git a/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java b/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java index 86982bfc949cd..a8b7dc9299ff0 100644 --- a/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java @@ -44,12 +44,12 @@ public MinDocQuery(int minDoc) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), minDoc); + return Objects.hash(classHash(), minDoc); } @Override public boolean equals(Object obj) { - if (super.equals(obj) == false) { + if (sameClassAs(obj) == false) { return false; } MinDocQuery that = (MinDocQuery) obj; diff --git a/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java b/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java index a9327d785e1e6..6017803b63d63 100644 --- a/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java +++ b/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java @@ -63,9 +63,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -622,8 +619,12 @@ public void build(InputIterator iterator) throws IOException { Set seenSurfaceForms = new HashSet<>(); int dedup = 0; - while (reader.read(scratch)) { - input.reset(scratch.bytes(), 0, scratch.length()); + while (true) { + BytesRef bytes = reader.next(); + if (bytes == null) { + break; + } + input.reset(bytes.bytes, bytes.offset, bytes.length); short analyzedLength = 
input.readShort(); analyzed.grow(analyzedLength+2); input.readBytes(analyzed.bytes(), 0, analyzedLength); @@ -631,13 +632,13 @@ public void build(InputIterator iterator) throws IOException { long cost = input.readInt(); - surface.bytes = scratch.bytes(); + surface.bytes = bytes.bytes; if (hasPayloads) { surface.length = input.readShort(); surface.offset = input.getPosition(); } else { surface.offset = input.getPosition(); - surface.length = scratch.length() - surface.offset; + surface.length = bytes.length - surface.offset; } if (previousAnalyzed == null) { @@ -679,11 +680,11 @@ public void build(InputIterator iterator) throws IOException { builder.add(scratchInts.get(), outputs.newPair(cost, BytesRef.deepCopyOf(surface))); } else { int payloadOffset = input.getPosition() + surface.length; - int payloadLength = scratch.length() - payloadOffset; + int payloadLength = bytes.length - payloadOffset; BytesRef br = new BytesRef(surface.length + 1 + payloadLength); System.arraycopy(surface.bytes, surface.offset, br.bytes, 0, surface.length); br.bytes[surface.length] = (byte) payloadSep; - System.arraycopy(scratch.bytes(), payloadOffset, br.bytes, surface.length+1, payloadLength); + System.arraycopy(bytes.bytes, payloadOffset, br.bytes, surface.length+1, payloadLength); br.length = br.bytes.length; builder.add(scratchInts.get(), outputs.newPair(cost, br)); } diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 5bdbf76265d1b..6e6e82b3fc56b 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -77,7 +77,7 @@ public class Version { public static final int V_5_0_0_alpha3_ID = 5000003; public static final Version V_5_0_0_alpha3 = new Version(V_5_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); public static final int V_5_0_0_ID = 5000099; - public static final Version V_5_0_0 = new Version(V_5_0_0_ID, 
org.apache.lucene.util.Version.LUCENE_6_0_1); + public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_1_0); public static final Version CURRENT = V_5_0_0; static { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JavaVersion.java b/core/src/main/java/org/elasticsearch/bootstrap/JavaVersion.java index afc2b77e211ab..4a434ebe6b7a7 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JavaVersion.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JavaVersion.java @@ -33,6 +33,13 @@ public List getVersion() { } private JavaVersion(List version) { + if (version.size() >= 2 + && version.get(0).intValue() == 1 + && version.get(1).intValue() == 8) { + // for Java 8 there is ambiguity since both 1.8 and 8 are supported, + // so we rewrite the former to the latter + version = new ArrayList<>(version.subList(1, version.size())); + } this.version = Collections.unmodifiableList(version); } @@ -75,6 +82,19 @@ public int compareTo(JavaVersion o) { return 0; } + @Override + public boolean equals(Object o) { + if (o == null || o.getClass() != getClass()) { + return false; + } + return compareTo((JavaVersion) o) == 0; + } + + @Override + public int hashCode() { + return version.hashCode(); + } + @Override public String toString() { return version.stream().map(v -> Integer.toString(v)).collect(Collectors.joining(".")); diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java index 4087704d5cdb2..9982a08f17f54 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.Collection; -import org.apache.lucene.spatial.util.GeoEncodingUtils; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BitUtil; /** @@ -39,7 +39,7 @@ public class 
GeoHashUtils { /** maximum precision for geohash strings */ public static final int PRECISION = 12; - private static final short MORTON_OFFSET = (GeoEncodingUtils.BITS<<1) - (PRECISION*5); + private static final short MORTON_OFFSET = (GeoPointField.BITS<<1) - (PRECISION*5); // No instance: private GeoHashUtils() { @@ -51,7 +51,7 @@ private GeoHashUtils() { public static final long longEncode(final double lon, final double lat, final int level) { // shift to appropriate level final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET); - return ((BitUtil.flipFlop(GeoEncodingUtils.mortonHash(lat, lon)) >>> msf) << 4) | level; + return ((BitUtil.flipFlop(GeoPointField.encodeLatLon(lat, lon)) >>> msf) << 4) | level; } /** @@ -117,7 +117,7 @@ public static final String stringEncode(final double lon, final double lat) { */ public static final String stringEncode(final double lon, final double lat, final int level) { // convert to geohashlong - final long ghLong = fromMorton(GeoEncodingUtils.mortonHash(lat, lon), level); + final long ghLong = fromMorton(GeoPointField.encodeLatLon(lat, lon), level); return stringEncode(ghLong); } @@ -138,7 +138,7 @@ public static final String stringEncodeFromMortonLong(long hashedVal, final int StringBuilder geoHash = new StringBuilder(); short precision = 0; - final short msf = (GeoEncodingUtils.BITS<<1)-5; + final short msf = (GeoPointField.BITS<<1)-5; long mask = 31L<>>(msf-(precision*5)))]); diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index 5d1250a51482a..96fe2826da802 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -19,12 +19,11 @@ package org.elasticsearch.common.geo; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BitUtil; import static org.elasticsearch.common.geo.GeoHashUtils.mortonEncode; 
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; -import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat; -import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon; /** * @@ -84,14 +83,14 @@ public GeoPoint resetFromString(String value) { } public GeoPoint resetFromIndexHash(long hash) { - lon = mortonUnhashLon(hash); - lat = mortonUnhashLat(hash); + lon = GeoPointField.decodeLongitude(hash); + lat = GeoPointField.decodeLatitude(hash); return this; } public GeoPoint resetFromGeoHash(String geohash) { final long hash = mortonEncode(geohash); - return this.reset(mortonUnhashLat(hash), mortonUnhashLon(hash)); + return this.reset(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash)); } public GeoPoint resetFromGeoHash(long geohashLong) { @@ -164,8 +163,4 @@ public static GeoPoint fromGeohash(String geohash) { public static GeoPoint fromGeohash(long geohashLong) { return new GeoPoint().resetFromGeoHash(geohashLong); } - - public static GeoPoint fromIndexLong(long indexLong) { - return new GeoPoint().resetFromIndexHash(indexLong); - } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index d5cc6846865c3..69ab2059ccfc3 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import static org.apache.lucene.spatial.util.GeoDistanceUtils.maxRadialDistanceMeters; import java.io.IOException; @@ -67,6 +66,9 @@ public class GeoUtils { /** Earth ellipsoid polar distance in meters */ public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS; + /** rounding error for quantized latitude and longitude values */ + public static final double TOLERANCE = 1E-6; + 
/** Returns the minimum between the provided distance 'initialRadius' and the * maximum distance/radius from the point 'center' before overlapping **/ @@ -468,6 +470,14 @@ public static GeoPoint parseGeoPoint(String data, GeoPoint point) { } } + /** Returns the maximum distance/radius (in meters) from the point 'center' before overlapping */ + public static double maxRadialDistanceMeters(final double centerLat, final double centerLon) { + if (Math.abs(centerLat) == MAX_LAT) { + return SloppyMath.haversinMeters(centerLat, centerLon, 0, centerLon); + } + return SloppyMath.haversinMeters(centerLat, centerLon, centerLat, (MAX_LON + centerLon) % 360); + } + private GeoUtils() { } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index 9b995f423a3fe..75f400fdc9dbb 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -45,6 +45,7 @@ import org.apache.lucene.util.SmallFloat; import java.io.IOException; +import java.util.Objects; import java.util.Set; /** @@ -63,6 +64,19 @@ public AllTermQuery(Term term) { this.term = term; } + @Override + public boolean equals(Object obj) { + if (sameClassAs(obj) == false) { + return false; + } + return Objects.equals(term, ((AllTermQuery) obj).term); + } + + @Override + public int hashCode() { + return 31 * classHash() + term.hashCode(); + } + @Override public Query rewrite(IndexReader reader) throws IOException { Query rewritten = super.rewrite(reader); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MatchNoDocsQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MatchNoDocsQuery.java index a25b4c0aa296f..9caf350926c56 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MatchNoDocsQuery.java +++ 
b/core/src/main/java/org/elasticsearch/common/lucene/search/MatchNoDocsQuery.java @@ -66,4 +66,14 @@ public Scorer scorer(LeafReaderContext context) throws IOException { public String toString(String field) { return "MatchNoDocsQuery[\"" + reason + "\"]"; } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index fbe0c28e3416d..06ab2b4a53012 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -84,14 +84,14 @@ public MoreLikeThisQuery(String likeText, String[] moreLikeFields, Analyzer anal @Override public int hashCode() { - return Objects.hash(super.hashCode(), boostTerms, boostTermsFactor, Arrays.hashCode(likeText), + return Objects.hash(classHash(), boostTerms, boostTermsFactor, Arrays.hashCode(likeText), maxDocFreq, maxQueryTerms, maxWordLen, minDocFreq, minTermFrequency, minWordLen, Arrays.hashCode(moreLikeFields), minimumShouldMatch, stopWords); } @Override public boolean equals(Object obj) { - if (super.equals(obj) == false) { + if (sameClassAs(obj) == false) { return false; } MoreLikeThisQuery other = (MoreLikeThisQuery) obj; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index 05006ec0db7f3..87bfdacb1c760 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -238,7 +238,7 @@ public final String toString(String f) { */ @Override public boolean equals(Object o) { - if 
(super.equals(o) == false) { + if (sameClassAs(o) == false) { return false; } MultiPhrasePrefixQuery other = (MultiPhrasePrefixQuery) o; @@ -252,7 +252,7 @@ && termArraysEquals(this.termArrays, other.termArrays) */ @Override public int hashCode() { - return super.hashCode() + return classHash() ^ slop ^ termArraysHashCode() ^ positions.hashCode(); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index e62f3f6665a63..3927dcd518e33 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -355,7 +355,7 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (super.equals(o) == false) { + if (sameClassAs(o) == false) { return false; } FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o; @@ -367,6 +367,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(filterFunctions)); + return Objects.hash(classHash(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(filterFunctions)); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 646076a3a17f2..be98a07a9c166 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -210,7 +210,7 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (super.equals(o) == false) { + if (sameClassAs(o) == false) { return false; } 
FunctionScoreQuery other = (FunctionScoreQuery) o; @@ -221,6 +221,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), subQuery.hashCode(), function, combineFunction, minScore, maxBoost); + return Objects.hash(classHash(), subQuery.hashCode(), function, combineFunction, minScore, maxBoost); } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 6eb8df68242d2..8c2d4dc01bf47 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -181,6 +181,7 @@ public void apply(Settings value, Settings current, Settings previous) { IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, IndicesQueryCache.INDICES_CACHE_QUERY_SIZE_SETTING, IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING, + IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING, IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, MetaData.SETTING_READ_ONLY_SETTING, diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java index e3dc84a347793..967d07174b9a9 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java @@ -24,7 +24,6 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; @@ -426,7 +425,7 @@ public static TermsEnum 
wrapGeoPointTerms(TermsEnum termsEnum) { protected AcceptStatus accept(BytesRef term) throws IOException { // accept only the max resolution terms // todo is this necessary? - return GeoEncodingUtils.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ? + return GeoPointField.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ? AcceptStatus.YES : AcceptStatus.END; } }; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index c18f96c06b0b8..90554bd130846 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.CharsRefBuilder; @@ -58,7 +57,7 @@ public Long next() throws IOException { return null; } if (termEncoding == GeoPointField.TermEncoding.PREFIX) { - return GeoEncodingUtils.prefixCodedToGeoCoded(term); + return GeoPointField.prefixCodedToGeoCoded(term); } else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) { return LegacyNumericUtils.prefixCodedToLong(term); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index a79631481d2ce..66cb7255fd6b8 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -204,7 +204,7 @@ public Query rewrite(IndexReader reader) throws IOException { @Override public 
boolean equals(Object o) { if (this == o) return true; - if (!super.equals(o)) return false; + if (sameClassAs(o) == false) return false; LateParsingQuery that = (LateParsingQuery) o; if (includeLower != that.includeLower) return false; @@ -218,7 +218,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone); + return Objects.hash(classHash(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java index a7e44ba3654cb..a337415395578 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java @@ -213,7 +213,7 @@ public Query rewrite(IndexReader reader) throws IOException { @Override public boolean equals(Object o) { if (this == o) return true; - if (!super.equals(o)) return false; + if (sameClassAs(o) == false) return false; LateParsingQuery that = (LateParsingQuery) o; if (includeLower != that.includeLower) return false; @@ -227,7 +227,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone); + return Objects.hash(classHash(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index f960ecaa9774b..d882be8e9d7cb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -195,7 +195,7 @@ public Query 
rewrite(IndexReader reader) throws IOException { @Override public boolean equals(Object obj) { - if (super.equals(obj) == false) { + if (sameClassAs(obj) == false) { return false; } TypeQuery that = (TypeQuery) obj; @@ -204,7 +204,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return 31 * super.hashCode() + type.hashCode(); + return 31 * classHash() + type.hashCode(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 1e2a078925fde..a123f64c4d6ba 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -23,7 +23,6 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; -import org.apache.lucene.document.XInetAddressPoint; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.XPointValues; @@ -176,7 +175,7 @@ public Query termQuery(Object value, @Nullable QueryShardContext context) { if (fields.length == 2) { InetAddress address = InetAddresses.forString(fields[0]); int prefixLength = Integer.parseInt(fields[1]); - return XInetAddressPoint.newPrefixQuery(name(), address, prefixLength); + return InetAddressPoint.newPrefixQuery(name(), address, prefixLength); } else { throw new IllegalArgumentException("Expected [ip/prefix] but was [" + term + "]"); } @@ -191,27 +190,27 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower failIfNotIndexed(); InetAddress lower; if (lowerTerm == null) { - lower = XInetAddressPoint.MIN_VALUE; + lower = InetAddressPoint.MIN_VALUE; } else { lower = parse(lowerTerm); if (includeLower == false) { - if (lower.equals(XInetAddressPoint.MAX_VALUE)) { + if 
(lower.equals(InetAddressPoint.MAX_VALUE)) { return new MatchNoDocsQuery(); } - lower = XInetAddressPoint.nextUp(lower); + lower = InetAddressPoint.nextUp(lower); } } InetAddress upper; if (upperTerm == null) { - upper = XInetAddressPoint.MAX_VALUE; + upper = InetAddressPoint.MAX_VALUE; } else { upper = parse(upperTerm); if (includeUpper == false) { - if (upper.equals(XInetAddressPoint.MIN_VALUE)) { + if (upper.equals(InetAddressPoint.MIN_VALUE)) { return new MatchNoDocsQuery(); } - upper = XInetAddressPoint.nextDown(upper); + upper = InetAddressPoint.nextDown(upper); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java index 06f30a3477e78..cf18d5a4c1034 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery; -import org.apache.lucene.spatial.util.GeoDistanceUtils; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -48,8 +47,6 @@ import java.util.Objects; import java.util.Optional; -import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE; - public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "geo_distance_range"; @@ -354,7 +351,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT); } } else { - toValue = GeoDistanceUtils.maxRadialDistanceMeters(point.lat(), point.lon()); + toValue = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon()); } final Version indexVersionCreated 
= context.indexVersionCreated(); @@ -371,8 +368,8 @@ protected Query doToQuery(QueryShardContext context) throws IOException { GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; return new XGeoPointDistanceRangeQuery(fieldType.name(), encoding, point.lat(), point.lon(), - (includeLower) ? fromValue : fromValue + TOLERANCE, - (includeUpper) ? toValue : toValue - TOLERANCE); + (includeLower) ? fromValue : fromValue + GeoUtils.TOLERANCE, + (includeUpper) ? toValue : toValue - GeoUtils.TOLERANCE); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java index 9fb44845f22fc..389e3812e331f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java @@ -411,7 +411,7 @@ public Query rewrite(IndexReader reader) throws IOException { @Override public boolean equals(Object o) { - if (!super.equals(o)) return false; + if (sameClassAs(o) == false) return false; LateParsingQuery that = (LateParsingQuery) o; @@ -425,7 +425,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode); + return Objects.hash(classHash(), toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 6d563f22a0601..778501f013248 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -184,7 +184,7 @@ public String toString(String field) { public boolean equals(Object obj) { if (this == obj) return true; - if (!super.equals(obj)) + if 
(sameClassAs(obj) == false) return false; ScriptQuery other = (ScriptQuery) obj; return Objects.equals(script, other.script); @@ -192,7 +192,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), script); + return Objects.hash(classHash(), script); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java index 6f92e411c00de..6c4fd23e64c68 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java @@ -190,7 +190,7 @@ public float matchCost() { @Override public boolean equals(Object o) { if (this == o) return true; - if (super.equals(o) == false) return false; + if (sameClassAs(o) == false) return false; GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) o; @@ -212,7 +212,7 @@ public String toString(String field) { @Override public int hashCode() { - int result = super.hashCode(); + int result = classHash(); long temp; temp = lat != +0.0d ? 
Double.doubleToLongBits(lat) : 0L; result = 31 * result + Long.hashCode(temp); diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java index d62aa76efd9ba..c3a52cb114ebb 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java @@ -111,7 +111,7 @@ public String toString(String field) { @Override public boolean equals(Object obj) { - if (super.equals(obj) == false) { + if (sameClassAs(obj) == false) { return false; } GeoPolygonQuery that = (GeoPolygonQuery) obj; @@ -121,7 +121,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - int h = super.hashCode(); + int h = classHash(); h = 31 * h + indexFieldData.getFieldName().hashCode(); h = 31 * h + Arrays.hashCode(points); return h; diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java index 2f2801a2abeea..789ee25e1b502 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/InMemoryGeoBoundingBoxQuery.java @@ -84,7 +84,7 @@ public String toString(String field) { @Override public boolean equals(Object obj) { - if (super.equals(obj) == false) { + if (sameClassAs(obj) == false) { return false; } InMemoryGeoBoundingBoxQuery other = (InMemoryGeoBoundingBoxQuery) obj; @@ -95,7 +95,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), fieldName(), topLeft, bottomRight); + return Objects.hash(classHash(), fieldName(), topLeft, bottomRight); } private static class Meridian180GeoBoundingBoxBits implements Bits { diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java 
b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 62173f936c50f..dbfcad6048a16 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -131,16 +131,7 @@ boolean recoverFromLocalShards(BiConsumer mappingUpdate } final void addIndices(RecoveryState.Index indexRecoveryStats, Directory target, Directory... sources) throws IOException { - /* - * TODO: once we upgraded to Lucene 6.1 use HardlinkCopyDirectoryWrapper to enable hardlinks if possible and enable it - * in the security.policy: - * - * grant codeBase "${codebase.lucene-misc-6.1.0.jar}" { - * // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper - * permission java.nio.file.LinkPermission "hard"; - * }; - * target = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target); - */ + target = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target); try (IndexWriter writer = new IndexWriter(new StatsDirectoryWrapper(target, indexRecoveryStats), new IndexWriterConfig(null) .setCommitOnClose(false) diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index bd01e7f0183f2..70b9443e04320 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -45,6 +45,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Predicate; public class IndicesQueryCache extends AbstractComponent implements QueryCache, Closeable { @@ -52,6 +53,9 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache, "indices.queries.cache.size", "10%", Property.NodeScope); public static final Setting INDICES_CACHE_QUERY_COUNT_SETTING = Setting.intSetting( 
"indices.queries.cache.count", 10000, 1, Property.NodeScope); + // enables caching on all segments instead of only the larger ones, for testing only + public static final Setting INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING = Setting.boolSetting( + "indices.queries.cache.all_segments", false, Property.NodeScope); private final LRUQueryCache cache; private final ShardCoreKeyMap shardKeyMap = new ShardCoreKeyMap(); @@ -69,111 +73,11 @@ public IndicesQueryCache(Settings settings) { final int count = INDICES_CACHE_QUERY_COUNT_SETTING.get(settings); logger.debug("using [node] query cache with size [{}] max filter count [{}]", size, count); - cache = new LRUQueryCache(count, size.bytes()) { - - private Stats getStats(Object coreKey) { - final ShardId shardId = shardKeyMap.getShardId(coreKey); - if (shardId == null) { - return null; - } - return shardStats.get(shardId); - } - - private Stats getOrCreateStats(Object coreKey) { - final ShardId shardId = shardKeyMap.getShardId(coreKey); - Stats stats = shardStats.get(shardId); - if (stats == null) { - stats = new Stats(); - shardStats.put(shardId, stats); - } - return stats; - } - - // It's ok to not protect these callbacks by a lock since it is - // done in LRUQueryCache - @Override - protected void onClear() { - assert Thread.holdsLock(this); - super.onClear(); - for (Stats stats : shardStats.values()) { - // don't throw away hit/miss - stats.cacheSize = 0; - stats.ramBytesUsed = 0; - } - sharedRamBytesUsed = 0; - } - - @Override - protected void onQueryCache(Query filter, long ramBytesUsed) { - assert Thread.holdsLock(this); - super.onQueryCache(filter, ramBytesUsed); - sharedRamBytesUsed += ramBytesUsed; - } - - @Override - protected void onQueryEviction(Query filter, long ramBytesUsed) { - assert Thread.holdsLock(this); - super.onQueryEviction(filter, ramBytesUsed); - sharedRamBytesUsed -= ramBytesUsed; - } - - @Override - protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) { - assert 
Thread.holdsLock(this); - super.onDocIdSetCache(readerCoreKey, ramBytesUsed); - final Stats shardStats = getOrCreateStats(readerCoreKey); - shardStats.cacheSize += 1; - shardStats.cacheCount += 1; - shardStats.ramBytesUsed += ramBytesUsed; - - StatsAndCount statsAndCount = stats2.get(readerCoreKey); - if (statsAndCount == null) { - statsAndCount = new StatsAndCount(shardStats); - stats2.put(readerCoreKey, statsAndCount); - } - statsAndCount.count += 1; - } - - @Override - protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) { - assert Thread.holdsLock(this); - super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed); - // onDocIdSetEviction might sometimes be called with a number - // of entries equal to zero if the cache for the given segment - // was already empty when the close listener was called - if (numEntries > 0) { - // We can't use ShardCoreKeyMap here because its core closed - // listener is called before the listener of the cache which - // triggers this eviction. 
So instead we use use stats2 that - // we only evict when nothing is cached anymore on the segment - // instead of relying on close listeners - final StatsAndCount statsAndCount = stats2.get(readerCoreKey); - final Stats shardStats = statsAndCount.stats; - shardStats.cacheSize -= numEntries; - shardStats.ramBytesUsed -= sumRamBytesUsed; - statsAndCount.count -= numEntries; - if (statsAndCount.count == 0) { - stats2.remove(readerCoreKey); - } - } - } - - @Override - protected void onHit(Object readerCoreKey, Query filter) { - assert Thread.holdsLock(this); - super.onHit(readerCoreKey, filter); - final Stats shardStats = getStats(readerCoreKey); - shardStats.hitCount += 1; - } - - @Override - protected void onMiss(Object readerCoreKey, Query filter) { - assert Thread.holdsLock(this); - super.onMiss(readerCoreKey, filter); - final Stats shardStats = getOrCreateStats(readerCoreKey); - shardStats.missCount += 1; - } - }; + if (INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.get(settings)) { + cache = new ElasticsearchLRUQueryCache(count, size.bytes(), context -> true); + } else { + cache = new ElasticsearchLRUQueryCache(count, size.bytes()); + } sharedRamBytesUsed = 0; } @@ -316,4 +220,111 @@ public void onClose(ShardId shardId) { assert empty(shardStats.get(shardId)); shardStats.remove(shardId); } + + private class ElasticsearchLRUQueryCache extends LRUQueryCache { + + ElasticsearchLRUQueryCache(int maxSize, long maxRamBytesUsed, Predicate leavesToCache) { + super(maxSize, maxRamBytesUsed, leavesToCache); + } + + ElasticsearchLRUQueryCache(int maxSize, long maxRamBytesUsed) { + super(maxSize, maxRamBytesUsed); + } + + private Stats getStats(Object coreKey) { + final ShardId shardId = shardKeyMap.getShardId(coreKey); + if (shardId == null) { + return null; + } + return shardStats.get(shardId); + } + + private Stats getOrCreateStats(Object coreKey) { + final ShardId shardId = shardKeyMap.getShardId(coreKey); + Stats stats = shardStats.get(shardId); + if (stats == null) { + 
stats = new Stats(); + shardStats.put(shardId, stats); + } + return stats; + } + + // It's ok to not protect these callbacks by a lock since it is + // done in LRUQueryCache + @Override + protected void onClear() { + super.onClear(); + for (Stats stats : shardStats.values()) { + // don't throw away hit/miss + stats.cacheSize = 0; + stats.ramBytesUsed = 0; + } + sharedRamBytesUsed = 0; + } + + @Override + protected void onQueryCache(Query filter, long ramBytesUsed) { + super.onQueryCache(filter, ramBytesUsed); + sharedRamBytesUsed += ramBytesUsed; + } + + @Override + protected void onQueryEviction(Query filter, long ramBytesUsed) { + super.onQueryEviction(filter, ramBytesUsed); + sharedRamBytesUsed -= ramBytesUsed; + } + + @Override + protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) { + super.onDocIdSetCache(readerCoreKey, ramBytesUsed); + final Stats shardStats = getOrCreateStats(readerCoreKey); + shardStats.cacheSize += 1; + shardStats.cacheCount += 1; + shardStats.ramBytesUsed += ramBytesUsed; + + StatsAndCount statsAndCount = stats2.get(readerCoreKey); + if (statsAndCount == null) { + statsAndCount = new StatsAndCount(shardStats); + stats2.put(readerCoreKey, statsAndCount); + } + statsAndCount.count += 1; + } + + @Override + protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) { + super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed); + // onDocIdSetEviction might sometimes be called with a number + // of entries equal to zero if the cache for the given segment + // was already empty when the close listener was called + if (numEntries > 0) { + // We can't use ShardCoreKeyMap here because its core closed + // listener is called before the listener of the cache which + // triggers this eviction. 
So instead we use stats2 that + we only evict when nothing is cached anymore on the segment + instead of relying on close listeners + final StatsAndCount statsAndCount = stats2.get(readerCoreKey); + final Stats shardStats = statsAndCount.stats; + shardStats.cacheSize -= numEntries; + shardStats.ramBytesUsed -= sumRamBytesUsed; + statsAndCount.count -= numEntries; + if (statsAndCount.count == 0) { + stats2.remove(readerCoreKey); + } + } + } + + @Override + protected void onHit(Object readerCoreKey, Query filter) { + super.onHit(readerCoreKey, filter); + final Stats shardStats = getStats(readerCoreKey); + shardStats.hitCount += 1; + } + + @Override + protected void onMiss(Object readerCoreKey, Query filter) { + super.onMiss(readerCoreKey, filter); + final Stats shardStats = getOrCreateStats(readerCoreKey); + shardStats.missCount += 1; + } + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java index 192ad6c28dc29..ec838e7dd41ad 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.spatial.util.GeoEncodingUtils; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; @@ -82,9 +82,9 @@ public void collect(int doc, long bucket) throws IOException { counts.increment(bucket, valueCount); // get the previous GeoPoint if a moving avg was computed if (prevCounts > 0) { - final GeoPoint centroid = 
GeoPoint.fromIndexLong(centroids.get(bucket)); - pt[0] = centroid.lon(); - pt[1] = centroid.lat(); + final long mortonCode = centroids.get(bucket); + pt[0] = GeoPointField.decodeLongitude(mortonCode); + pt[1] = GeoPointField.decodeLatitude(mortonCode); } // update the moving average for (int i = 0; i < valueCount; ++i) { @@ -92,7 +92,9 @@ public void collect(int doc, long bucket) throws IOException { pt[0] = pt[0] + (value.getLon() - pt[0]) / ++prevCounts; pt[1] = pt[1] + (value.getLat() - pt[1]) / prevCounts; } - centroids.set(bucket, GeoEncodingUtils.mortonHash(pt[1], pt[0])); + // TODO: we do not need to interleave the lat and lon bits here + // should we just store them contiguously? + centroids.set(bucket, GeoPointField.encodeLatLon(pt[1], pt[0])); } } }; @@ -104,8 +106,10 @@ public InternalAggregation buildAggregation(long bucket) { return buildEmptyAggregation(); } final long bucketCount = counts.get(bucket); - final GeoPoint bucketCentroid = (bucketCount > 0) ? GeoPoint.fromIndexLong(centroids.get(bucket)) : - new GeoPoint(Double.NaN, Double.NaN); + final long mortonCode = centroids.get(bucket); + final GeoPoint bucketCentroid = (bucketCount > 0) + ? 
new GeoPoint(GeoPointField.decodeLatitude(mortonCode), GeoPointField.decodeLongitude(mortonCode)) + : null; return new InternalGeoCentroid(name, bucketCentroid , bucketCount, pipelineAggregators(), metaData()); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java index 2798169b69936..2bb3056ca6692 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid; -import org.apache.lucene.spatial.util.GeoEncodingUtils; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -61,6 +61,7 @@ protected InternalGeoCentroid() { public InternalGeoCentroid(String name, GeoPoint centroid, long count, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); + assert (centroid == null) == (count == 0); this.centroid = centroid; assert count >= 0; this.count = count; @@ -68,7 +69,7 @@ public InternalGeoCentroid(String name, GeoPoint centroid, long count, List path) { protected void doReadFrom(StreamInput in) throws IOException { count = in.readVLong(); if (in.readBoolean()) { - centroid = GeoPoint.fromIndexLong(in.readLong()); + final long hash = in.readLong(); + centroid = new GeoPoint(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash)); } else { centroid = null; } @@ -139,7 +141,8 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeVLong(count); if (centroid != null) { out.writeBoolean(true); - 
out.writeLong(GeoEncodingUtils.mortonHash(centroid.lat(), centroid.lon())); + // should we just write lat and lon separately? + out.writeLong(GeoPointField.encodeLatLon(centroid.lat(), centroid.lon())); } else { out.writeBoolean(false); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index 3192145720763..f34da5301d5e0 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -175,7 +175,7 @@ static class NestedChildrenQuery extends Query { @Override public boolean equals(Object obj) { - if (super.equals(obj) == false) { + if (sameClassAs(obj) == false) { return false; } NestedChildrenQuery other = (NestedChildrenQuery) obj; @@ -187,7 +187,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - int hash = super.hashCode(); + int hash = classHash(); hash = 31 * hash + parentFilter.hashCode(); hash = 31 * hash + childFilter.hashCode(); hash = 31 * hash + docId; diff --git a/core/src/main/java/org/elasticsearch/search/slice/SliceQuery.java b/core/src/main/java/org/elasticsearch/search/slice/SliceQuery.java index 0d87b275403dd..2b8040ebd2866 100644 --- a/core/src/main/java/org/elasticsearch/search/slice/SliceQuery.java +++ b/core/src/main/java/org/elasticsearch/search/slice/SliceQuery.java @@ -61,7 +61,7 @@ public int getMax() { @Override public boolean equals(Object o) { - if (super.equals(o) == false) { + if (sameClassAs(o) == false) { return false; } SliceQuery that = (SliceQuery) o; @@ -70,7 +70,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), field, id, max); + return Objects.hash(classHash(), field, id, max); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java 
b/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index b967a6b6e715f..429a3ebe89264 100644 --- a/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -74,11 +74,7 @@ private DocIdSet build(LeafReader reader) throws IOException { int hashCode = term.hashCode(); if (contains(hashCode)) { docsEnum = te.postings(docsEnum, PostingsEnum.NONE); - int docId = docsEnum.nextDoc(); - while (docId != DocIdSetIterator.NO_MORE_DOCS) { - builder.add(docId); - docId = docsEnum.nextDoc(); - } + builder.add(docsEnum); } } return builder.build(); diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index ff0ea773667b2..b185289b58dd6 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. 
-grant codeBase "${codebase.lucene-core-6.0.1.jar}" { +grant codeBase "${codebase.lucene-core-6.1.0-snapshot-3a57bea.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,6 +42,11 @@ grant codeBase "${codebase.lucene-core-6.0.1.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; +grant codeBase "${codebase.lucene-misc-6.1.0-snapshot-3a57bea.jar}" { + // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper + permission java.nio.file.LinkPermission "hard"; +}; + //// Everything else: grant { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index d4ab6e01ab926..d7faab5eeda99 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -31,9 +31,11 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.0.1.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.1.0-snapshot-3a57bea.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + // needed for testing hardlinks in StoreRecoveryTests since we install MockFS + permission java.nio.file.LinkPermission "hard"; }; grant codeBase "${codebase.randomizedtesting-runner-2.3.2.jar}" { diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 65c91f5daab78..862cccab318a2 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -270,7 +270,8 @@ public void 
testLuceneVersionIsSameOnMinorRelease() { assertTrue("lucene versions must be " + other + " >= " + version, other.luceneVersion.onOrAfter(version.luceneVersion)); } - if (other.major == version.major && other.minor == version.minor) { + if (other.isAlpha() == false && version.isAlpha() == false + && other.major == version.major && other.minor == version.minor) { assertEquals(other.luceneVersion.major, version.luceneVersion.major); assertEquals(other.luceneVersion.minor, version.luceneVersion.minor); // should we also assert the lucene bugfix version? diff --git a/core/src/test/java/org/elasticsearch/bootstrap/JavaVersionTests.java b/core/src/test/java/org/elasticsearch/bootstrap/JavaVersionTests.java index d2ef349625e4c..a6e74a4770635 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/JavaVersionTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/JavaVersionTests.java @@ -72,4 +72,8 @@ public void testInvalidVersions() { assertFalse(JavaVersion.isValid(version)); } } + + public void testJava8Compat() { + assertEquals(JavaVersion.parse("1.8"), JavaVersion.parse("8")); + } } \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java index 062774bf2f74c..4e4d638d35549 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java @@ -22,9 +22,9 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.util.GeoUtils; import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; import static org.elasticsearch.test.geo.RandomShapeGenerator.randomPoint; import static 
org.hamcrest.Matchers.allOf; @@ -105,8 +105,8 @@ private void assertValues(MultiGeoPointValues values, int docId, boolean missing assertThat(docCount, greaterThan(0)); for (int i = 0; i < docCount; ++i) { final GeoPoint point = values.valueAt(i); - assertThat(point.lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT_INCL), lessThanOrEqualTo(GeoUtils.MAX_LAT_INCL))); - assertThat(point.lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON_INCL), lessThanOrEqualTo(GeoUtils.MAX_LON_INCL))); + assertThat(point.lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT), lessThanOrEqualTo(GeoUtils.MAX_LAT))); + assertThat(point.lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON), lessThanOrEqualTo(GeoUtils.MAX_LON))); } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index 177d3b7b0f730..b2e1989454c62 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper.externalvalues; -import org.apache.lucene.spatial.util.GeoEncodingUtils; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; @@ -88,7 +88,7 @@ public void testExternalValues() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); } else { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0))); + assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); } 
assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); @@ -146,7 +146,7 @@ public void testExternalValuesWithMultifield() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); } else { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0))); + assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); } assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); @@ -208,7 +208,7 @@ public void testExternalValuesWithMultifieldTwoLevels() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); } else { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0))); + assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); } assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index a1fdb7ec60fa0..202afd7a4b170 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.geo; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -46,7 +47,6 @@ import java.util.Map; import java.lang.NumberFormatException; -import 
static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -90,7 +90,7 @@ public void testLatLonValues() throws Exception { if (indexCreatedBefore22 == true) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -197,7 +197,7 @@ public void testNormalizeLatLonValuesDefault() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(89.0, 1.0))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(89.0, 1.0))); } doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() @@ -209,7 +209,7 @@ public void testNormalizeLatLonValuesDefault() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-89.0, -1.0))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-89.0, -1.0))); } doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() @@ -221,7 +221,7 @@ public void testNormalizeLatLonValuesDefault() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-1.0, -179.0))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), 
equalTo(GeoPointField.encodeLatLon(-1.0, -179.0))); } } @@ -408,7 +408,7 @@ public void testLatLonValuesStored() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -441,7 +441,7 @@ public void testArrayLatLonValues() throws Exception { assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2)); assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3)); // indexed hash - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); // point field for 2nd value assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4)); @@ -450,7 +450,7 @@ public void testArrayLatLonValues() throws Exception { assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4)); assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5)); // indexed hash - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } else { assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); @@ -459,14 +459,14 @@ public void testArrayLatLonValues() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); } else { - 
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } } } @@ -491,7 +491,7 @@ public void testLatLonInOneValue() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -517,7 +517,7 @@ public void testLatLonInOneValueStored() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -559,12 +559,12 @@ public void testLatLonInOneValueArray() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + 
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } } @@ -588,7 +588,7 @@ public void testLonLatArray() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -613,7 +613,7 @@ public void testLonLatArrayDynamic() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -639,7 +639,7 @@ public void testLonLatArrayStored() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -669,14 +669,14 @@ public void testLonLatArrayArrayStored() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - 
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } } else { assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(4)); @@ -685,12 +685,12 @@ public void testLonLatArrayArrayStored() throws Exception { assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2)); assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3)); - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4)); assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4)); assertThat(doc.rootDoc().getFields("point.lon")[2].numericValue().doubleValue(), equalTo(1.5)); assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5)); - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + 
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 837cef6a17c13..90528c9a8f471 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper.geo; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; @@ -35,7 +36,6 @@ import java.util.Collection; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; -import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -72,7 +72,7 @@ public void testLatLonValues() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } @@ -96,7 +96,7 @@ public void testLatLonInOneValue() throws Exception { if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3))); + assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java index 522a35ccd5dad..884f52cc0ed0a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java @@ -21,7 +21,6 @@ import java.net.InetAddress; import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.document.XInetAddressPoint; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.util.BytesRef; @@ -69,11 +68,11 @@ public void testTermQuery() { ip = "2001:db8::2:1"; String prefix = ip + "/64"; - assertEquals(XInetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null)); + assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null)); ip = "192.168.1.7"; prefix = ip + "/16"; - assertEquals(XInetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null)); + assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null)); ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -88,7 +87,7 @@ public void testRangeQuery() { assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), - XInetAddressPoint.MAX_VALUE), + InetAddressPoint.MAX_VALUE), ft.rangeQuery(null, null, randomBoolean(), randomBoolean())); assertEquals( @@ -106,13 +105,13 @@ public void testRangeQuery() { assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), - XInetAddressPoint.MAX_VALUE), + InetAddressPoint.MAX_VALUE), ft.rangeQuery("2001:db8::", null, true, randomBoolean())); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), - XInetAddressPoint.MAX_VALUE), + InetAddressPoint.MAX_VALUE), 
ft.rangeQuery("2001:db8::", null, false, randomBoolean())); assertEquals( @@ -152,7 +151,7 @@ public void testRangeQuery() { assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), - XInetAddressPoint.MAX_VALUE), + InetAddressPoint.MAX_VALUE), // same lo/hi values but inclusive=false so this won't match anything ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true)); diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 7780d218b528c..387df7ac3cab1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -22,10 +22,10 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery; -import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import org.elasticsearch.test.AbstractQueryTestCase; @@ -213,7 +213,7 @@ private void assertGeoPointQuery(GeoDistanceQueryBuilder queryBuilder, Query que if (queryBuilder.geoDistance() != null) { distance = queryBuilder.geoDistance().normalize(distance, DistanceUnit.DEFAULT); distance = org.elasticsearch.common.geo.GeoUtils.maxRadialDistance(queryBuilder.point(), distance); - assertThat(geoQuery.getRadiusMeters(), closeTo(distance, GeoEncodingUtils.TOLERANCE)); + assertThat(geoQuery.getRadiusMeters(), closeTo(distance, GeoUtils.TOLERANCE)); } } diff --git 
a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java index a14bf7e0f3109..3e10eda34e24a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java @@ -22,7 +22,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery; -import org.apache.lucene.spatial.util.GeoDistanceUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.compress.CompressedXContent; @@ -61,7 +60,7 @@ protected GeoDistanceRangeQueryBuilder doCreateTestQueryBuilder() { } } GeoPoint point = builder.point(); - final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(point.lat(), point.lon()); + final double maxRadius = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon()); final int fromValueMeters = randomInt((int)(maxRadius*0.5)); final int toValueMeters = randomIntBetween(fromValueMeters + 1, (int)maxRadius); DistanceUnit fromToUnits = randomFrom(DistanceUnit.values()); diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index b41d8efafdfba..e6fd5abd05ee5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -100,8 +100,9 @@ private void assertGeoPointQuery(GeoPolygonQueryBuilder queryBuilder, Query quer GeoPointInPolygonQuery geoQuery = (GeoPointInPolygonQuery) query; assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName())); List queryBuilderPoints = queryBuilder.points(); - double[] lats = geoQuery.getLats(); - 
double[] lons = geoQuery.getLons(); + assertEquals(1, geoQuery.getPolygons().length); + double[] lats = geoQuery.getPolygons()[0].getPolyLats(); + double[] lons = geoQuery.getPolygons()[0].getPolyLons(); assertThat(lats.length, equalTo(queryBuilderPoints.size())); assertThat(lons.length, equalTo(queryBuilderPoints.size())); for (int i=0; i < queryBuilderPoints.size(); ++i) { @@ -321,8 +322,9 @@ private void assertGeoPolygonQuery(String query) throws IOException { } else { GeoPointInPolygonQuery q = (GeoPointInPolygonQuery) parsedQuery; assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME)); - final double[] lats = q.getLats(); - final double[] lons = q.getLons(); + assertEquals(1, q.getPolygons().length); + final double[] lats = q.getPolygons()[0].getPolyLats(); + final double[] lons = q.getPolygons()[0].getPolyLons(); assertThat(lats.length, equalTo(4)); assertThat(lons.length, equalTo(4)); assertThat(lats[0], closeTo(40, 1E-5)); diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java index fbb744f799c59..d8021ad36bb25 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java @@ -61,5 +61,15 @@ public String toString(String field) { public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return matchAllDocsQuery.createWeight(searcher, needsScores); } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } } } \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java b/core/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java index ffb64f991ccad..f31733dc47723 100644 --- 
a/core/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java @@ -31,7 +31,6 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.Version; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.test.ESTestCase; @@ -74,11 +73,9 @@ public void testAddIndices() throws IOException { assertEquals(numFiles, targetNumFiles); assertEquals(indexStats.totalFileCount(), targetNumFiles); if (hardLinksSupported(createTempDir())) { - assertEquals("upgrade to HardlinkCopyDirectoryWrapper in Lucene 6.1", Version.LATEST, Version.LUCENE_6_0_1); - // assertEquals(indexStats.reusedFileCount(), targetNumFiles); -- uncomment this once upgraded to Lucene 6.1 - assertEquals(indexStats.reusedFileCount(), 0); + assertEquals(targetNumFiles, indexStats.reusedFileCount()); } else { - assertEquals(indexStats.reusedFileCount(), 0); + assertEquals(0, indexStats.reusedFileCount(), 0); } DirectoryReader reader = DirectoryReader.open(target); SegmentInfos segmentCommitInfos = SegmentInfos.readLatestCommit(target); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index 5a4aa2e6b24c3..cd94ee0f8e954 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -35,6 +35,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesQueryCache; @@ -54,12 +55,12 @@ private 
static class DummyQuery extends Query { @Override public boolean equals(Object obj) { - return super.equals(obj) && id == ((DummyQuery) obj).id; + return sameClassAs(obj) && id == ((DummyQuery) obj).id; } @Override public int hashCode() { - return 31 * super.hashCode() + id; + return 31 * classHash() + id; } @Override @@ -93,6 +94,7 @@ public void testBasics() throws IOException { Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) + .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true) .build(); IndicesQueryCache cache = new IndicesQueryCache(settings); s.setQueryCache(cache); @@ -173,6 +175,7 @@ public void testTwoShards() throws IOException { Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) + .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true) .build(); IndicesQueryCache cache = new IndicesQueryCache(settings); s1.setQueryCache(cache); @@ -298,6 +301,7 @@ public void testStatsOnEviction() throws IOException { Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) + .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true) .build(); IndicesQueryCache cache = new IndicesQueryCache(settings); s1.setQueryCache(cache); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 3565cf0147d35..a4096fde9da5f 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -47,6 +47,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.IndicesQueryCache; import 
org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.sort.SortOrder; @@ -78,6 +79,7 @@ protected Settings nodeSettings(int nodeOrdinal) { //Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. Thread.sleep for 60s is bad return Settings.builder().put(super.nodeSettings(nodeOrdinal)) .put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1ms") + .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true) .build(); } diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index 7e01f5758222d..398ef64bc92fd 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -126,6 +126,8 @@ public void testSimpleBoundingBoxTest() throws Exception { } } + // norelease + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-7325") public void testLimitsBoundingBox() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index b8b04a8bc3352..0debdb263af07 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -30,7 +30,6 @@ import org.apache.lucene.spatial.query.SpatialArgs; import org.apache.lucene.spatial.query.SpatialOperation; import org.apache.lucene.spatial.query.UnsupportedSpatialOperation; -import org.apache.lucene.spatial.util.GeoProjectionUtils; import org.elasticsearch.Version; import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -42,6 +41,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; import org.elasticsearch.common.geo.builders.LineStringBuilder; import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; @@ -540,7 +540,7 @@ public void testNeighbors() { } public static double distance(double lat1, double lon1, double lat2, double lon2) { - return GeoProjectionUtils.SEMIMAJOR_AXIS * DistanceUtils.distHaversineRAD( + return GeoUtils.EARTH_SEMI_MAJOR_AXIS * DistanceUtils.distHaversineRAD( DistanceUtils.toRadians(lat1), DistanceUtils.toRadians(lon1), DistanceUtils.toRadians(lat2), diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index ba378a3c404e1..e0aec941487f1 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -318,13 +318,7 @@ public void testExplain() throws Exception { assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); Explanation explanation = searchResponse.getHits().hits()[0].explanation(); assertThat(explanation.getValue(), equalTo(2f)); - assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on child doc range from 0 to 1\n")); - // TODO: Enable when changes from BlockJoinQuery#explain are added to Lucene (Most likely version 4.2) -// assertThat(explanation.getDetails().length, equalTo(2)); -// assertThat(explanation.getDetails()[0].getValue(), equalTo(1f)); -// assertThat(explanation.getDetails()[0].getDescription(), equalTo("Child[0]")); -// 
assertThat(explanation.getDetails()[1].getValue(), equalTo(1f)); -// assertThat(explanation.getDetails()[1].getDescription(), equalTo("Child[1]")); + assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on 2 child docs in range from 0 to 1")); } public void testSimpleNestedSorting() throws Exception { diff --git a/distribution/licenses/lucene-analyzers-common-6.0.1.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.0.1.jar.sha1 deleted file mode 100644 index b581809a0043d..0000000000000 --- a/distribution/licenses/lucene-analyzers-common-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -53953c1a9b097f83209c84a422cf8f9d271f47c1 \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..bb4cc98e068ea --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +bf73c03e6b83f8e696133f40b9b1fc3381750149 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.0.1.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.0.1.jar.sha1 deleted file mode 100644 index 5433f09c99341..0000000000000 --- a/distribution/licenses/lucene-backward-codecs-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3647088603be84b8f4916ef86954e3336b98d254 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..a8eb4883e1512 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +8bc384f55faf99b6d6cee6f34df4fbd3145afb4d \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.0.1.jar.sha1 b/distribution/licenses/lucene-core-6.0.1.jar.sha1 deleted file mode 
100644 index 6bcd7fc87f483..0000000000000 --- a/distribution/licenses/lucene-core-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40ccd40bec54266a10aa1f81c565914ede8c0ca0 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-core-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..012fea19fdc1c --- /dev/null +++ b/distribution/licenses/lucene-core-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +fe19e7558440e10db4bd7150931dff6a7cf73243 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.0.1.jar.sha1 b/distribution/licenses/lucene-grouping-6.0.1.jar.sha1 deleted file mode 100644 index b132acc9112c9..0000000000000 --- a/distribution/licenses/lucene-grouping-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -010daaae60227fbe719ca95e9b6fcdb5c38d4eba \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-grouping-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..3b3338ffd2f02 --- /dev/null +++ b/distribution/licenses/lucene-grouping-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +c3f0de4cdd185d23bce66c580d9c12adb98182a5 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.0.1.jar.sha1 b/distribution/licenses/lucene-highlighter-6.0.1.jar.sha1 deleted file mode 100644 index 95d4b3edab9c4..0000000000000 --- a/distribution/licenses/lucene-highlighter-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -65d74c3642e6a86ba905045473b17cc84826527e \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-highlighter-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..255812ed5fb78 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ 
+ffb7087267bb6076b00c90f97ee36ebe23ea0662 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.0.1.jar.sha1 b/distribution/licenses/lucene-join-6.0.1.jar.sha1 deleted file mode 100644 index 07392cf260f31..0000000000000 --- a/distribution/licenses/lucene-join-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2105e2826ce93d1f764e5a0a3afa9ee461d556c1 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-join-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..4231857b4e76a --- /dev/null +++ b/distribution/licenses/lucene-join-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +d071ad17bed58b3267f6fa0b2a8211f8fe18c912 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.0.1.jar.sha1 b/distribution/licenses/lucene-memory-6.0.1.jar.sha1 deleted file mode 100644 index b9820103d3f8f..0000000000000 --- a/distribution/licenses/lucene-memory-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e2cde0688e487a27d08df0c2d81d492b1f4cdc2a \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-memory-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..19aa64ccd80c7 --- /dev/null +++ b/distribution/licenses/lucene-memory-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +f5e9b6eefe580a7f65276aca3192ca5796332509 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.0.1.jar.sha1 b/distribution/licenses/lucene-misc-6.0.1.jar.sha1 deleted file mode 100644 index 2670ab628df8f..0000000000000 --- a/distribution/licenses/lucene-misc-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e6e59996fc324319d695e41cf25e30e5f1e4c182 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-misc-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 
0000000000000..8480fcc3a490d --- /dev/null +++ b/distribution/licenses/lucene-misc-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +6b84a79c37b01197130cceb65e5573794f073df1 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.0.1.jar.sha1 b/distribution/licenses/lucene-queries-6.0.1.jar.sha1 deleted file mode 100644 index acaa53f1f8e25..0000000000000 --- a/distribution/licenses/lucene-queries-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09b0e5862a676ff9e55a1bc6ca37ad578a25cb38 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-queries-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..a3ed70c12afd2 --- /dev/null +++ b/distribution/licenses/lucene-queries-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +a9d51b77395dfdd7e6c4cf8c8506ebca5e1bb374 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.0.1.jar.sha1 b/distribution/licenses/lucene-queryparser-6.0.1.jar.sha1 deleted file mode 100644 index 48c91d68f4425..0000000000000 --- a/distribution/licenses/lucene-queryparser-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24f7ba0707aa01be2dd7749adff1659262be8f33 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-queryparser-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..6bf59bc00e75b --- /dev/null +++ b/distribution/licenses/lucene-queryparser-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +e322f004e574df119ba08dd8751a743422a46724 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.0.1.jar.sha1 b/distribution/licenses/lucene-sandbox-6.0.1.jar.sha1 deleted file mode 100644 index ef843328aa0b6..0000000000000 --- a/distribution/licenses/lucene-sandbox-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0faf4c0d7e0adb6fccd830a2d5797d4176b579fe \ No newline at end of file diff 
--git a/distribution/licenses/lucene-sandbox-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-sandbox-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..79567427105a4 --- /dev/null +++ b/distribution/licenses/lucene-sandbox-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +c7cb119652c906adcdf7fe64445c76d057329d63 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.0.1.jar.sha1 b/distribution/licenses/lucene-spatial-6.0.1.jar.sha1 deleted file mode 100644 index 25e7232ac1434..0000000000000 --- a/distribution/licenses/lucene-spatial-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d94d006251c904de3f1503c64746400877d6fa3 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-spatial-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..2b06178aaad4e --- /dev/null +++ b/distribution/licenses/lucene-spatial-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +ca6c17fe31884e968ae63fd475ce6532b767c7fa \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.0.1.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.0.1.jar.sha1 deleted file mode 100644 index d421e1b053ccc..0000000000000 --- a/distribution/licenses/lucene-spatial-extras-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3de19dbdb889fe87791dae291ac3b340586854c4 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..9c487b7746f8d --- /dev/null +++ b/distribution/licenses/lucene-spatial-extras-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +49235405e40757474aaa9e8e54946b67fe2a01d9 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.0.1.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.0.1.jar.sha1 deleted file mode 
100644 index 348f501bb5299..0000000000000 --- a/distribution/licenses/lucene-spatial3d-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5b1b7a754e83e2d58a819afa279b20b08b48c9c1 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..1eaab9f995558 --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +39f6b29c428327860c1a342bd57800e79ad92ef5 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.0.1.jar.sha1 b/distribution/licenses/lucene-suggest-6.0.1.jar.sha1 deleted file mode 100644 index 2cb6272d82651..0000000000000 --- a/distribution/licenses/lucene-suggest-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -55886bdaf16ecc6948e94b527837eaa1f16fe988 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.1.0-snapshot-3a57bea.jar.sha1 b/distribution/licenses/lucene-suggest-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..948dfc4c2b909 --- /dev/null +++ b/distribution/licenses/lucene-suggest-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +4e9f713d34fd4208bf308ac59132216f96521f13 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.0.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.0.1.jar.sha1 deleted file mode 100644 index 7b3f5a1cef947..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2b76056dbd40fb51dc5e8ef71e1919ad23e635a1 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.1.0-snapshot-3a57bea.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..271088e86c9ca --- /dev/null +++ 
b/modules/lang-expression/licenses/lucene-expressions-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +e5a4b673918f448006c0531799706abebe9a1db0 \ No newline at end of file diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index 11f0e4c191a9d..78d95edecff49 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; @@ -41,7 +42,6 @@ import java.util.List; import java.util.Random; -import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -218,7 +218,7 @@ public void testSortMinValueScript() throws IOException { assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L)); for (int i = 0; i < 10; i++) { - assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, TOLERANCE)); + assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, GeoUtils.TOLERANCE)); } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 2f3108d629812..fbf06468e75f1 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -243,7 +243,7 @@ public QueryStore getQueryStore() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; + if (sameClassAs(o) == false) return false; PercolateQuery that = (PercolateQuery) o; @@ -254,7 +254,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - int result = super.hashCode(); + int result = classHash(); result = 31 * result + documentType.hashCode(); result = 31 * result + documentSource.hashCode(); return result; diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 07959db1ff189..4879badc7d34a 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -379,6 +379,16 @@ public Query rewrite(IndexReader reader) throws IOException { public String toString(String field) { return "custom{" + field + "}"; } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } } } diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.1.jar.sha1 deleted file mode 100644 index 95dab25e74a14..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -da08d9919f54efd2e09968d49fe05f6ce3f0c7ce \ No newline at end of file diff --git 
a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0-snapshot-3a57bea.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..013def114d487 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +9cd8cbea5baef18a36bee86846a9ba026d2a02e0 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.1.jar.sha1 deleted file mode 100644 index 70f83bf52ccbf..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -77905f563c47994a764a6ab3d5ec198c174567a7 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0-snapshot-3a57bea.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..12c861c24ab16 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +86c6d6a367ed658351bd8c8828d6ed647ac79b7e \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.1.jar.sha1 deleted file mode 100644 index 8e2f7ab8b98e9..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3ee5d909c269e5da7a92715f41ead88943b38123 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0-snapshot-3a57bea.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..6f571d75537f4 --- /dev/null +++ 
b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +6553bf764a69cd15e4fe1e55661382872795b853 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.1.jar.sha1 deleted file mode 100644 index 981855d5a97d3..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3b7bdbf9efa84f8d8875bd7f1d8734276930b9c3 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0-snapshot-3a57bea.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..2ea2d6b96222e --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +979817950bc806400d8fa12a609ef215b5bdebd6 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.1.jar.sha1 deleted file mode 100644 index 4ff0afee68769..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e80e912621276e1009b72c06d5def188976c5426 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0-snapshot-3a57bea.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0-snapshot-3a57bea.jar.sha1 new file mode 100644 index 0000000000000..6677cfd3fc4b2 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0-snapshot-3a57bea.jar.sha1 @@ -0,0 +1 @@ +2a720b647b6a202ec1d8d91db02006ae9539670b \ No newline at end of file diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 2a3eecf4cc819..4e066bc7635fd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -108,6 +108,7 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.store.IndicesStore; @@ -1633,7 +1634,11 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b") .put("script.stored", "true") .put("script.inline", "true") - // wait short time for other active shards before actually deleting, default 30s not needed in tests + // by default we never cache below 10k docs in a segment, + // bypass this limit so that caching gets some testing in + // integration tests that usually create few documents + .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), nodeOrdinal % 2 == 0) + // wait short time for other active shards before actually deleting, default 30s not needed in tests .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(1, TimeUnit.SECONDS)); return builder.build(); }