From 03d0e191c684424f2b5c4eff71a31ee280004a4c Mon Sep 17 00:00:00 2001
From: Dan Hermann
Date: Wed, 16 Sep 2020 08:04:20 -0500
Subject: [PATCH] Fix failure in
 AppendProcessorTests.testAppendingToListWithDuplicatesDisallowed (#62385)

---
 .../ingest/common/AppendProcessorTests.java | 30 ++++++++++++-------
 1 file changed, 19 insertions(+), 11 deletions(-)

diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java
index ddd38f09e1bec..f97f0aba59f86 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.ingest.common;
 
+import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.IngestDocument.Metadata;
 import org.elasticsearch.ingest.Processor;
@@ -30,8 +31,12 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
@@ -190,23 +195,26 @@ public void testAppendingUniqueValueToScalar() throws Exception {
 
     public void testAppendingToListWithDuplicatesDisallowed() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
-        List<String> list = new ArrayList<>();
         int size = randomIntBetween(0, 10);
-        for (int i = 0; i < size; i++) {
-            list.add(randomAlphaOfLengthBetween(1, 10));
-        }
+        List<String> list = Stream.generate(() -> randomAlphaOfLengthBetween(1, 10)).limit(size).collect(Collectors.toList());
         String originalField = RandomDocumentPicks.addRandomField(random(), ingestDocument, list);
         List<String> expectedValues = new ArrayList<>(list);
         List<String> existingValues = randomSubsetOf(list);
-        int uniqueValuesSize = randomIntBetween(0, 10);
-        List<String> uniqueValues = new ArrayList<>();
-        for (int i = 0; i < uniqueValuesSize; i++) {
-            uniqueValues.add(randomAlphaOfLengthBetween(1, 10));
-        }
+
+        // generate new values
+        int nonexistingValuesSize = randomIntBetween(0, 10);
+        Set<String> newValues = Stream.generate(() -> randomAlphaOfLengthBetween(1, 10))
+            .limit(nonexistingValuesSize)
+            .collect(Collectors.toSet());
+
+        // create a set using the new values making sure there are no overlapping values already present in the existing values
+        Set<String> nonexistingValues = Sets.difference(newValues, new HashSet<>(existingValues));
         List<String> valuesToAppend = new ArrayList<>(existingValues);
-        valuesToAppend.addAll(uniqueValues);
-        expectedValues.addAll(uniqueValues);
+        valuesToAppend.addAll(nonexistingValues);
+        expectedValues.addAll(nonexistingValues);
         Collections.sort(valuesToAppend);
+
+        // attempt to append both new and existing values
         Processor appendProcessor = createAppendProcessor(originalField, valuesToAppend, false);
         appendProcessor.execute(ingestDocument);
         List<String> fieldValue = ingestDocument.getFieldValue(originalField, List.class);