Skip to content

Commit

Permalink
Merge branch 'master' into ccr
Browse files Browse the repository at this point in the history
* master:
  Tests: Fix convert error tests to use fixed value (#32415)
  IndicesClusterStateService should replace an init. replica with an init. primary with the same aId (#32374)
  REST high-level client: parse back _ignored meta field (#32362)
  [CI] Mute DocumentSubsetReaderTests testSearch
  • Loading branch information
dnhatn committed Jul 30, 2018
2 parents aa3b6e0 + 34d006f commit 2245812
Show file tree
Hide file tree
Showing 13 changed files with 190 additions and 86 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -67,10 +67,9 @@ public void testConvertIntLeadingZero() throws Exception {
assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(10));
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32370")
public void testConvertIntHexError() {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String value = "0x" + randomAlphaOfLengthBetween(1, 10);
String value = "0xnotanumber";
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value);
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
Expand Down Expand Up @@ -138,10 +137,9 @@ public void testConvertLongLeadingZero() throws Exception {
assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(10L));
}

@AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/32370")
public void testConvertLongHexError() {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String value = "0x" + randomAlphaOfLengthBetween(1, 10);
String value = "0xnotanumber";
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value);
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
Expand Down
24 changes: 19 additions & 5 deletions server/src/main/java/org/elasticsearch/index/get/GetResult.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package org.elasticsearch.index.get;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.document.DocumentField;
Expand All @@ -30,6 +31,7 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.search.lookup.SourceLookup;

Expand Down Expand Up @@ -225,10 +227,13 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params
}
}
}

for (DocumentField field : metaFields) {
Object value = field.getValue();
builder.field(field.getName(), value);
// TODO: can we avoid having an exception here?
if (field.getName().equals(IgnoredFieldMapper.NAME)) {
builder.field(field.getName(), field.getValues());
} else {
builder.field(field.getName(), field.<Object>getValue());
}
}

builder.field(FOUND, exists);
Expand Down Expand Up @@ -316,7 +321,11 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index
parser.skipChildren(); // skip potential inner objects for forward compatibility
}
} else if (token == XContentParser.Token.START_ARRAY) {
parser.skipChildren(); // skip potential inner arrays for forward compatibility
if (IgnoredFieldMapper.NAME.equals(currentFieldName)) {
fields.put(currentFieldName, new DocumentField(currentFieldName, parser.list()));
} else {
parser.skipChildren(); // skip potential inner arrays for forward compatibility
}
}
}
return new GetResult(index, type, id, version, found, source, fields);
Expand Down Expand Up @@ -400,7 +409,12 @@ public boolean equals(Object o) {

@Override
public int hashCode() {
    // NOTE(review): the scraped diff contained both the pre- and post-commit
    // return statements; only the committed ordering is kept here.
    // sourceAsMap() is hashed (rather than the raw source bytes) so that equal
    // documents serialized with different XContent types hash identically —
    // presumably to stay consistent with equals(); confirm against equals().
    return Objects.hash(version, exists, index, type, id, fields, sourceAsMap());
}

@Override
// Renders this object via its XContent representation.
// NOTE(review): the two boolean arguments are presumably pretty-print and
// human-readable flags of Strings.toString(ToXContent, boolean, boolean) —
// confirm against that utility's signature.
public String toString() {
return Strings.toString(this, true, true);
}
}

Original file line number Diff line number Diff line change
Expand Up @@ -420,6 +420,12 @@ private void removeShards(final ClusterState state) {
// state may result in a new shard being initialized while having the same allocation id as the currently started shard.
logger.debug("{} removing shard (not active, current {}, new {})", shardId, currentRoutingEntry, newShardRouting);
indexService.removeShard(shardId.id(), "removing shard (stale copy)");
} else if (newShardRouting.primary() && currentRoutingEntry.primary() == false && newShardRouting.initializing()) {
assert currentRoutingEntry.initializing() : currentRoutingEntry; // see above if clause
// this can happen when cluster state batching batches activation of the shard, closing an index, reopening it
// and assigning an initializing primary to this node
logger.debug("{} removing shard (not active, current {}, new {})", shardId, currentRoutingEntry, newShardRouting);
indexService.removeShard(shardId.id(), "removing shard (stale copy)");
}
}
}
Expand Down
33 changes: 23 additions & 10 deletions server/src/main/java/org/elasticsearch/search/SearchHit.java
Original file line number Diff line number Diff line change
Expand Up @@ -602,16 +602,24 @@ private static void declareMetaDataFields(ObjectParser<Map<String, Object>, Void
for (String metadatafield : MapperService.getAllMetaFields()) {
if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false
&& metadatafield.equals(Fields._TYPE) == false) {
parser.declareField((map, field) -> {
@SuppressWarnings("unchecked")
Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
v -> new HashMap<String, DocumentField>());
fieldMap.put(field.getName(), field);
}, (p, c) -> {
List<Object> values = new ArrayList<>();
values.add(parseFieldsValue(p));
return new DocumentField(metadatafield, values);
}, new ParseField(metadatafield), ValueType.VALUE);
if (metadatafield.equals(IgnoredFieldMapper.NAME)) {
parser.declareObjectArray((map, list) -> {
@SuppressWarnings("unchecked")
Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
v -> new HashMap<String, DocumentField>());
DocumentField field = new DocumentField(metadatafield, list);
fieldMap.put(field.getName(), field);
}, (p, c) -> parseFieldsValue(p),
new ParseField(metadatafield));
} else {
parser.declareField((map, field) -> {
@SuppressWarnings("unchecked")
Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
v -> new HashMap<String, DocumentField>());
fieldMap.put(field.getName(), field);
}, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))),
new ParseField(metadatafield), ValueType.VALUE);
}
}
}
}
Expand Down Expand Up @@ -958,4 +966,9 @@ public int hashCode() {
return Objects.hash(field, offset, child);
}
}

@Override
// Renders this object via its XContent representation.
// NOTE(review): the two boolean arguments are presumably pretty-print and
// human-readable flags of Strings.toString(ToXContent, boolean, boolean) —
// confirm against that utility's signature.
public String toString() {
return Strings.toString(this, true, true);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,12 @@
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.AllocationId;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable.Builder;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
Expand All @@ -44,6 +46,7 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
Expand Down Expand Up @@ -93,7 +96,8 @@ public static ClusterState state(String index, boolean activePrimaryLocal, Shard
IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(Settings.builder()
.put(SETTING_VERSION_CREATED, Version.CURRENT)
.put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
.put(SETTING_CREATION_DATE, System.currentTimeMillis())).primaryTerm(0, primaryTerm).build();
.put(SETTING_CREATION_DATE, System.currentTimeMillis())).primaryTerm(0, primaryTerm)
.build();

RoutingTable.Builder routing = new RoutingTable.Builder();
routing.addAsNew(indexMetaData);
Expand Down Expand Up @@ -138,12 +142,19 @@ public static ClusterState state(String index, boolean activePrimaryLocal, Shard
TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, false, replicaState,
unassignedInfo));
}
final IndexShardRoutingTable indexShardRoutingTable = indexShardRoutingBuilder.build();

IndexMetaData.Builder indexMetaDataBuilder = new IndexMetaData.Builder(indexMetaData);
indexMetaDataBuilder.putInSyncAllocationIds(0,
indexShardRoutingTable.activeShards().stream().map(ShardRouting::allocationId).map(AllocationId::getId)
.collect(Collectors.toSet())
);

ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
state.nodes(discoBuilder);
state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
state.metaData(MetaData.builder().put(indexMetaDataBuilder.build(), false).generateClusterUuidIfNeeded());
state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(indexMetaData.getIndex())
.addIndexShard(indexShardRoutingBuilder.build())).build());
.addIndexShard(indexShardRoutingTable)).build());
return state.build();
}

Expand Down Expand Up @@ -272,21 +283,21 @@ public static ClusterState stateWithAssignedPrimariesAndOneReplica(String index,
state.routingTable(RoutingTable.builder().add(indexRoutingTableBuilder.build()).build());
return state.build();
}


/**
* Creates cluster state with several indexes, shards and replicas and all shards STARTED.
*/
public static ClusterState stateWithAssignedPrimariesAndReplicas(String[] indices, int numberOfShards, int numberOfReplicas) {

int numberOfDataNodes = numberOfReplicas + 1;
int numberOfDataNodes = numberOfReplicas + 1;
DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
for (int i = 0; i < numberOfDataNodes + 1; i++) {
final DiscoveryNode node = newNode(i);
discoBuilder = discoBuilder.add(node);
}
discoBuilder.localNodeId(newNode(0).getId());
discoBuilder.masterNodeId(newNode(numberOfDataNodes + 1).getId());
discoBuilder.masterNodeId(newNode(numberOfDataNodes + 1).getId());
ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
state.nodes(discoBuilder);
Builder routingTableBuilder = RoutingTable.builder();
Expand Down Expand Up @@ -316,7 +327,7 @@ public static ClusterState stateWithAssignedPrimariesAndReplicas(String[] indice
state.metaData(metadataBuilder);
state.routingTable(routingTableBuilder.build());
return state.build();
}
}

/**
* Creates cluster state with and index that has one shard and as many replicas as numberOfReplicas.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,11 @@
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;

Expand Down Expand Up @@ -98,14 +102,28 @@ private static DocumentField mutateDocumentField(DocumentField documentField) {

/**
 * Returns a tuple of a randomly generated {@link DocumentField} and the
 * field expected after a parse round-trip. Randomly chooses between a meta
 * field (where input and expected are the same instance) and a regular
 * stored field (where the expected values may differ after rendering in the
 * given {@code xContentType}).
 *
 * NOTE(review): the scraped diff interleaved the pre-commit lines
 * (RoutingFieldMapper-only variant and a duplicated tail) with the new
 * implementation; only the committed version is reproduced here.
 */
public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType) {
    if (randomBoolean()) {
        // Pick a random meta field, excluding _type/_index/_id which are
        // rendered at the root of the document rather than under "fields".
        String metaField = randomValueOtherThanMany(field -> field.equals(TypeFieldMapper.NAME)
                || field.equals(IndexFieldMapper.NAME) || field.equals(IdFieldMapper.NAME),
            () -> randomFrom(MapperService.getAllMetaFields()));
        DocumentField documentField;
        if (metaField.equals(IgnoredFieldMapper.NAME)) {
            // _ignored may carry several values; generate 1-3 of them.
            int numValues = randomIntBetween(1, 3);
            List<Object> ignoredFields = new ArrayList<>(numValues);
            for (int i = 0; i < numValues; i++) {
                ignoredFields.add(randomAlphaOfLengthBetween(3, 10));
            }
            documentField = new DocumentField(metaField, ignoredFields);
        } else {
            //meta fields are single value only, besides _ignored
            documentField = new DocumentField(metaField, Collections.singletonList(randomAlphaOfLengthBetween(3, 10)));
        }
        return Tuple.tuple(documentField, documentField);
    } else {
        String fieldName = randomAlphaOfLengthBetween(3, 10);
        Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType);
        DocumentField input = new DocumentField(fieldName, tuple.v1());
        DocumentField expected = new DocumentField(fieldName, tuple.v2());
        return Tuple.tuple(input, expected);
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,6 @@ public void testToAndFromXContentEmbedded() throws Exception {
XContentType xContentType = randomFrom(XContentType.values());
Tuple<GetResult, GetResult> tuple = randomGetResult(xContentType);
GetResult getResult = tuple.v1();

// We don't expect to retrieve the index/type/id of the GetResult because they are not rendered
// by the toXContentEmbedded method.
GetResult expectedGetResult = new GetResult(null, null, null, -1,
Expand All @@ -106,7 +105,6 @@ public void testToAndFromXContentEmbedded() throws Exception {
parsedEmbeddedGetResult = GetResult.fromXContentEmbedded(parser);
assertNull(parser.nextToken());
}

assertEquals(expectedGetResult, parsedEmbeddedGetResult);
//print the parsed object out and test that the output is the same as the original output
BytesReference finalBytes = toXContentEmbedded(parsedEmbeddedGetResult, xContentType, humanReadable);
Expand Down Expand Up @@ -203,16 +201,17 @@ public static Tuple<GetResult, GetResult> randomGetResult(XContentType xContentT
return Tuple.tuple(getResult, expectedGetResult);
}

private static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(XContentType xContentType) {
public static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(XContentType xContentType) {
int numFields = randomIntBetween(2, 10);
Map<String, DocumentField> fields = new HashMap<>(numFields);
Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
for (int i = 0; i < numFields; i++) {
while (fields.size() < numFields) {
Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType);
DocumentField getField = tuple.v1();
DocumentField expectedGetField = tuple.v2();
fields.put(getField.getName(), getField);
expectedFields.put(expectedGetField.getName(), expectedGetField);
if (fields.putIfAbsent(getField.getName(), getField) == null) {
assertNull(expectedFields.putIfAbsent(expectedGetField.getName(), expectedGetField));
}
}
return Tuple.tuple(fields, expectedFields);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,10 @@ public void injectRandomFailures() {
enableRandomFailures = randomBoolean();
}

// Turns off injected random failures for tests that require deterministic
// behavior (counterpart to injectRandomFailures() above, which randomizes it).
protected void disableRandomFailures() {
enableRandomFailures = false;
}

protected void failRandomly() {
if (enableRandomFailures && rarely()) {
throw new RuntimeException("dummy test failure");
Expand Down
Loading

0 comments on commit 2245812

Please sign in to comment.