Skip to content

Commit

Permalink
Merge branch '6.x' into ccr-6.x
Browse files Browse the repository at this point in the history
* 6.x:
  Add an example of dynamic field names (#27255)
  fixed checkstyle violation
  #26260 Allow ip_range to accept CIDR notation (#27192)
  #27189 Fixed rounding of bounds in scaled float comparison (#27207)
  [TEST] Fix failing exists query test
  test: Do not run old parent/child tests against a cluster with minimum version greater than 6.0.0
  Add support for Gradle 4.3 (#27249)
  Fixes QueryStringQueryBuilderTests
  build: Fix setting the incorrect bwc version in mixed cluster qa module
  fix compil after backport
  [Test] Fix QueryStringQueryBuilderTests.testExistsFieldQuery BWC
  Adjust assertions for sequence numbers BWC tests
  Do not create directories if repository is readonly (#26909)
  [Test] Fix QueryStringQueryBuilderTests.testExistsFieldQuery
  Uses norms for exists query if enabled (#27237)
  Reinstate recommendation for ≥ 3 master-eligible nodes. (#27204)
  • Loading branch information
martijnvg committed Nov 4, 2017
2 parents 1166c19 + d4849df commit 3ed5aaa
Show file tree
Hide file tree
Showing 22 changed files with 348 additions and 66 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -131,9 +131,10 @@ class BuildPlugin implements Plugin<Project> {
throw new GradleException("${minGradle} or above is required to build elasticsearch")
}

final GradleVersion maxGradle = GradleVersion.version('4.2')
if (currentGradleVersion >= maxGradle) {
throw new GradleException("${maxGradle} or above is not compatible with the elasticsearch build")
final GradleVersion gradle42 = GradleVersion.version('4.2')
final GradleVersion gradle43 = GradleVersion.version('4.3')
if (currentGradleVersion >= gradle42 && currentGradleVersion < gradle43) {
throw new GradleException("${currentGradleVersion} is not compatible with the elasticsearch build")
}

// enforce Java version
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,15 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {

private final int bufferSizeInBytes;

private final boolean readOnly;

/**
 * Creates a file-system blob store rooted at {@code path}.
 *
 * Honors the {@code readonly} repository setting: a read-only repository must not
 * modify the underlying file system, so the root directory is only created when the
 * store is writable (see #26909).
 *
 * @param settings repository settings; reads {@code readonly} (default {@code false})
 *                 and {@code repositories.fs.buffer_size} (default 100kb)
 * @param path     root directory of the blob store
 * @throws IOException if the root directory cannot be created for a writable store
 */
public FsBlobStore(Settings settings, Path path) throws IOException {
    super(settings);
    this.path = path;
    this.readOnly = settings.getAsBoolean("readonly", false);
    if (!this.readOnly) {
        // only a writable repository may create directories on disk
        Files.createDirectories(path);
    }
    this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.fs.buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();
}

Expand Down Expand Up @@ -80,7 +85,9 @@ public void close() {

/**
 * Resolves the given blob path against the store root and, unless the store is
 * read only, ensures the resolved directory hierarchy exists on disk.
 *
 * @param path logical blob path to resolve
 * @return the resolved file-system path (possibly non-existent for a read-only store)
 * @throws IOException if directory creation fails for a writable store
 */
private synchronized Path buildAndCreate(BlobPath path) throws IOException {
    Path f = buildPath(path);
    if (!readOnly) {
        // a read-only repository must never modify the underlying file system
        Files.createDirectories(f);
    }
    return f;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.NormsFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.settings.Settings;
Expand Down Expand Up @@ -280,10 +280,10 @@ public String typeName() {

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
if (omitNorms()) {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
} else {
return new NormsFieldExistsQuery(name());
}
}

Expand Down Expand Up @@ -356,7 +356,9 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().name(), value, fieldType());
fields.add(field);
createFieldNamesField(context, fields);
if (fieldType().omitNorms()) {
createFieldNamesField(context, fields);
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,14 @@

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.blobstore.fs.FsBlobStore;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.repositories.ESBlobStoreTestCase;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

@LuceneTestCase.SuppressFileSystems("ExtrasFS")
Expand All @@ -35,4 +37,39 @@ protected BlobStore newBlobStore() throws IOException {
Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
return new FsBlobStore(settings, tempDir);
}

public void testReadOnly() throws Exception {
    // A read-only store must never touch the file system: neither its root nor any
    // container directory may be created.
    Settings readOnlySettings = Settings.builder().put("readonly", true).build();
    Path tempDir = createTempDir();
    Path path = tempDir.resolve("bar");

    try (FsBlobStore store = new FsBlobStore(readOnlySettings, path)) {
        assertFalse(Files.exists(path));
        BlobPath blobPath = BlobPath.cleanPath().add("foo");
        store.blobContainer(blobPath);
        Path resolved = store.path();
        for (String segment : blobPath) {
            resolved = resolved.resolve(segment);
        }
        assertFalse(Files.exists(resolved));
    }

    // A writable store (explicitly or by default) creates directories eagerly and
    // supports round-tripping a blob through the container.
    Settings writableSettings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("readonly", false).build();
    try (FsBlobStore store = new FsBlobStore(writableSettings, path)) {
        assertTrue(Files.exists(path));
        BlobPath blobPath = BlobPath.cleanPath().add("foo");
        BlobContainer container = store.blobContainer(blobPath);
        Path resolved = store.path();
        for (String segment : blobPath) {
            resolved = resolved.resolve(segment);
        }
        assertTrue(Files.exists(resolved));
        assertTrue(Files.isDirectory(resolved));

        byte[] payload = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
        writeBlob(container, "test", new BytesArray(payload));
        assertArrayEquals(readBlobFully(container, "test", payload.length), payload);
        assertTrue(container.blobExists("test"));
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
Expand Down Expand Up @@ -87,7 +88,7 @@ public void testInjectIntoDocDuringParsing() throws Exception {
.bytes(),
XContentType.JSON));

assertFieldNames(set("a"), doc);
assertFieldNames(Collections.emptySet(), doc);
}

public void testExplicitEnabled() throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.NormsFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
Expand Down Expand Up @@ -113,6 +114,11 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query,
assertThat(constantScoreQuery.getQuery(), instanceOf(DocValuesFieldExistsQuery.class));
DocValuesFieldExistsQuery dvExistsQuery = (DocValuesFieldExistsQuery) constantScoreQuery.getQuery();
assertEquals(field, dvExistsQuery.getField());
} else if (field.equals("_all") == false &&
context.getQueryShardContext().getMapperService().fullName(field).omitNorms() == false) {
assertThat(constantScoreQuery.getQuery(), instanceOf(NormsFieldExistsQuery.class));
NormsFieldExistsQuery normsExistsQuery = (NormsFieldExistsQuery) constantScoreQuery.getQuery();
assertEquals(field, normsExistsQuery.getField());
} else {
assertThat(constantScoreQuery.getQuery(), instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) constantScoreQuery.getQuery();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.NormsFieldExistsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
Expand All @@ -45,6 +46,7 @@
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
Expand Down Expand Up @@ -804,17 +806,20 @@ public void testExistsFieldQuery() throws Exception {
QueryShardContext context = createShardContext();
QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(STRING_FIELD_NAME + ":*");
Query query = queryBuilder.toQuery(context);
Query expected;
if (getCurrentTypes().length > 0) {
expected = new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME)));
if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
&& (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) {
assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME))));
} else {
assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME)))));
}
} else {
expected = new MatchNoDocsQuery();
assertThat(query, equalTo(new MatchNoDocsQuery()));
}
assertThat(query, equalTo(expected));

queryBuilder = new QueryStringQueryBuilder("_all:*");
query = queryBuilder.toQuery(context);
expected = new MatchAllDocsQuery();
Query expected = new MatchAllDocsQuery();
assertThat(query, equalTo(expected));

queryBuilder = new QueryStringQueryBuilder("*:*");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.NormsFieldExistsQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
Expand Down Expand Up @@ -129,6 +130,9 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query,
if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
&& context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) {
expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(queryBuilder.fieldName()));
} else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
&& context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) {
expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(queryBuilder.fieldName()));
} else {
expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, queryBuilder.fieldName())));
}
Expand Down
14 changes: 14 additions & 0 deletions docs/reference/ingest/ingest-node.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -563,6 +563,20 @@ to set the index that the document will be indexed into:
--------------------------------------------------
// NOTCONSOLE

Dynamic field names are also supported. This example sets the field named after the
value of `service` to the value of the field `code`:

[source,js]
--------------------------------------------------
{
"set": {
"field": "{{service}}"
"value": "{{code}}"
}
}
--------------------------------------------------
// NOTCONSOLE

[[handling-failure-in-pipelines]]
== Handling Failures in Pipelines

Expand Down
7 changes: 7 additions & 0 deletions docs/reference/modules/node.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,13 @@ discovery.zen.minimum_master_nodes: 2 <1>
----------------------------
<1> Defaults to `1`.

To be able to remain available when one of the master-eligible nodes fails,
clusters should have at least three master-eligible nodes, with
`minimum_master_nodes` set accordingly. A <<rolling-upgrades,rolling upgrade>>,
performed without any downtime, also requires at least three master-eligible
nodes to avoid the possibility of data loss if a network split occurs while the
upgrade is in progress.

This setting can also be changed dynamically on a live cluster with the
<<cluster-update-settings,cluster update settings API>>:

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
Expand All @@ -57,6 +58,7 @@

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
Expand Down Expand Up @@ -357,7 +359,8 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
range = context.parseExternalValue(Range.class);
} else {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
final XContentParser.Token start = parser.currentToken();
if (start == XContentParser.Token.START_OBJECT) {
RangeFieldType fieldType = fieldType();
RangeType rangeType = fieldType.rangeType;
String fieldName = null;
Expand Down Expand Up @@ -397,6 +400,8 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
}
}
range = new Range(rangeType, from, to, includeFrom, includeTo);
} else if (fieldType().rangeType == RangeType.IP && start == XContentParser.Token.VALUE_STRING) {
range = parseIpRangeFromCidr(parser);
} else {
throw new MapperParsingException("error parsing field ["
+ name() + "], expected an object but got " + parser.currentName());
Expand Down Expand Up @@ -448,6 +453,23 @@ && fieldType().dateTimeFormatter().locale() != Locale.ROOT))) {
}
}

/**
 * Parses an {@code ip_range} supplied as a single CIDR string (e.g. {@code "192.168.0.0/16"},
 * see #27192) into an inclusive [lower, upper] IP range.
 */
private static Range parseIpRangeFromCidr(final XContentParser parser) throws IOException {
// cidr.v1() is the parsed address, cidr.v2() the prefix length in bits
final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(parser.text());
// create the lower value by zeroing out the host portion, upper value by filling it with all ones.
byte[] lower = cidr.v1().getAddress();
byte[] upper = lower.clone();
// walk every bit of the host portion (bits at index >= prefix length)
for (int i = cidr.v2(); i < 8 * lower.length; i++) {
// mask selecting bit i within its byte, most-significant bit first
int m = 1 << 7 - (i & 7);
lower[i >> 3] &= ~m;
upper[i >> 3] |= m;
}
try {
// both bounds inclusive: a CIDR block covers its network and broadcast addresses
return new Range(RangeType.IP, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper), true, true);
} catch (UnknownHostException bogus) {
// getByAddress only throws for illegal array lengths; ours came from a valid InetAddress
throw new AssertionError(bogus);
}
}

/** Enum defining the type of range */
public enum RangeType {
IP("ip_range") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -258,19 +258,19 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower
failIfNotIndexed();
Long lo = null;
if (lowerTerm != null) {
double dValue = parse(lowerTerm);
double dValue = parse(lowerTerm) * scalingFactor;
if (includeLower == false) {
dValue = Math.nextUp(dValue);
}
lo = Math.round(Math.ceil(dValue * scalingFactor));
lo = Math.round(Math.ceil(dValue));
}
Long hi = null;
if (upperTerm != null) {
double dValue = parse(upperTerm);
double dValue = parse(upperTerm) * scalingFactor;
if (includeUpper == false) {
dValue = Math.nextDown(dValue);
}
hi = Math.round(Math.floor(dValue * scalingFactor));
hi = Math.round(Math.floor(dValue));
}
Query query = NumberFieldMapper.NumberType.LONG.rangeQuery(name(), lo, hi, true, true, hasDocValues());
if (boost() != 1f) {
Expand Down
Loading

0 comments on commit 3ed5aaa

Please sign in to comment.