Merge branch 'opensearch-project:main' into cs-test-changes
Gankris96 authored Nov 7, 2023
2 parents 7a3c643 + 0ba5d58 commit 56be59e
Showing 10 changed files with 162 additions and 20 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -139,6 +139,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([#10370](https://github.com/opensearch-project/OpenSearch/pull/10370))
- Fix some test methods in SimulatePipelineRequestParsingTests never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496))
- Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737))
- Fix SuggestSearch.testSkipDuplicates by forcing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068))
- Add version condition when adding geoshape doc values to the index, to ensure backward compatibility ([#11095](https://github.com/opensearch-project/OpenSearch/pull/11095))

### Security

@@ -87,7 +87,7 @@ protected boolean forbidPrivateIndexSettings() {
*/
protected void prepareGeoShapeIndexForAggregations(final Random random) throws Exception {
expectedDocsCountForGeoShapes = new HashMap<>();
final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
final List<IndexRequestBuilder> geoshapes = new ArrayList<>();
assertAcked(prepareCreate(GEO_SHAPE_INDEX_NAME).setSettings(settings).setMapping(GEO_SHAPE_FIELD_NAME, "type" + "=geo_shape"));
boolean isShapeIntersectingBB = false;
@@ -136,7 +136,7 @@ protected void prepareSingleValueGeoPointIndex(final Random random) throws Excep
expectedDocCountsForSingleGeoPoint = new HashMap<>();
createIndex("idx_unmapped");
final Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.number_of_shards", 4)
.put("index.number_of_replicas", 0)
.build();
@@ -160,7 +160,7 @@ protected void prepareSingleValueGeoPointIndex(final Random random) throws Excep

protected void prepareMultiValuedGeoPointIndex(final Random random) throws Exception {
multiValuedExpectedDocCountsGeoPoint = new HashMap<>();
final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
final Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
final List<IndexRequestBuilder> cities = new ArrayList<>();
assertAcked(
prepareCreate("multi_valued_idx").setSettings(settings)
@@ -0,0 +1,16 @@
---
"Insert Document with geoshape field":
- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "geo_shape_index_old", "_id":191}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [101.0, 0.0] ]}}'
- '{"index": {"_index": "geo_shape_index_old", "_id":219}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [106.0, 0.0] ]}}'

- do:
search:
rest_total_hits_as_int: true
index: geo_shape_index_old
- match: { hits.total: 2 }
@@ -0,0 +1,28 @@
---
"Create index with Geoshape field":
- do:
indices.create:
index: geo_shape_index_old
body:
settings:
index:
number_of_replicas: 2
mappings:
"properties":
"location":
"type": "geo_shape"

- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "geo_shape_index_old", "_id":191}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [101.0, 0.0] ]}}'
- '{"index": {"_index": "geo_shape_index_old", "_id":219}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [106.0, 0.0] ]}}'

- do:
search:
rest_total_hits_as_int: true
index: geo_shape_index_old
- match: { hits.total: 2 }
@@ -0,0 +1,61 @@
---
"Validate we are able to index documents after upgrade":
- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "geo_shape_index_old", "_id":191}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [101.0, 0.0] ]}}'
- '{"index": {"_index": "geo_shape_index_old", "_id":219}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [106.0, 0.0] ]}}'

- do:
search:
rest_total_hits_as_int: true
index: geo_shape_index_old
- match: { hits.total: 2 }


---
"Create index with Geoshape field in new cluster":
- do:
indices.create:
index: geo_shape_index_new
body:
settings:
index:
number_of_replicas: 2
mappings:
"properties":
"location":
"type": "geo_shape"

- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "geo_shape_index_new", "_id":191}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [101.0, 0.0] ]}}'
- '{"index": {"_index": "geo_shape_index_new", "_id":219}}'
- '{"name": "NEMO Science Museum","location": {"type": "envelope","coordinates": [ [100.0, 1.0], [106.0, 0.0] ]}}'

- do:
search:
rest_total_hits_as_int: true
index: geo_shape_index_new
- match: { hits.total: 2 }

- do:
search:
rest_total_hits_as_int: true
index: geo_shape_index_new
body:
aggregations:
myaggregation:
geo_bounds:
field: "location"
- match: { hits.total: 2 }
- match: { aggregations.myaggregation.bounds.top_left.lat: 0.9999999823048711 }
- match: { aggregations.myaggregation.bounds.top_left.lon: 99.99999999068677 }
- match: { aggregations.myaggregation.bounds.bottom_right.lat: 0.0 }
- match: { aggregations.myaggregation.bounds.bottom_right.lon: 105.99999996833503 }
@@ -244,10 +244,6 @@ public void testWithIndexAlias() {
}

public void testWithIndexFilter() throws InterruptedException {
assumeFalse(
"Concurrent search case muted pending fix: https://github.com/opensearch-project/OpenSearch/issues/10433",
internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING)
);
assertAcked(prepareCreate("index-1").setMapping("timestamp", "type=date", "field1", "type=keyword"));
assertAcked(prepareCreate("index-2").setMapping("timestamp", "type=date", "field1", "type=long"));

@@ -79,6 +79,7 @@
import java.util.Set;

import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.opensearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
@@ -1171,6 +1172,7 @@ public void testSkipDuplicates() throws Exception {
createIndexAndMapping(mapping);
int numDocs = randomIntBetween(10, 100);
int numUnique = randomIntBetween(1, numDocs);
logger.info("Suggestion duplicate parameters: numDocs {} numUnique {}", numDocs, numUnique);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
int[] weights = new int[numUnique];
Integer[] termIds = new Integer[numUnique];
@@ -1180,8 +1182,10 @@
int weight = randomIntBetween(0, 100);
weights[id] = Math.max(weight, weights[id]);
String suggestion = "suggestion-" + String.format(Locale.ENGLISH, "%03d", id);
logger.info("Creating {}, id {}, weight {}", suggestion, i, id, weight);
indexRequestBuilders.add(
client().prepareIndex(INDEX)
.setRefreshPolicy(WAIT_UNTIL)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
@@ -1195,10 +1199,12 @@
indexRandom(true, indexRequestBuilders);

Arrays.sort(termIds, Comparator.comparingInt(o -> weights[(int) o]).reversed().thenComparingInt(a -> (int) a));
logger.info("Expected terms id ordered {}", (Object[]) termIds);
String[] expected = new String[numUnique];
for (int i = 0; i < termIds.length; i++) {
expected[i] = "suggestion-" + String.format(Locale.ENGLISH, "%03d", termIds[i]);
}
logger.info("Expected suggestions field values {}", (Object[]) expected);
CompletionSuggestionBuilder completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD)
.prefix("sugg")
.skipDuplicates(true)
@@ -1207,6 +1213,7 @@
SearchResponse searchResponse = client().prepareSearch(INDEX)
.suggest(new SuggestBuilder().addSuggestion("suggestions", completionSuggestionBuilder))
.get();
logger.info("Search Response with Suggestions {}", searchResponse);
assertSuggestions(searchResponse, true, "suggestions", expected);
}
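For context, the key change above is the per-request refresh policy. A minimal sketch of the idea, assuming the OpenSearch test-client API already used in this file (`INDEX` is the test's constant; `doc` is a placeholder source builder):

```java
import org.opensearch.action.support.WriteRequest;

// WAIT_UNTIL blocks each index call until a refresh has made the document
// searchable, so the suggest query issued later in the test is guaranteed
// to see every indexed document.
client().prepareIndex(INDEX)
    .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
    .setSource(doc) // placeholder: XContentBuilder with the completion field
    .get();         // returns only once the document is visible to search
```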

@@ -31,12 +31,15 @@

package org.opensearch.index.mapper;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LatLonShape;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.opensearch.Version;
import org.opensearch.common.Explicit;
import org.opensearch.common.geo.GeometryParser;
import org.opensearch.common.geo.ShapeRelation;
@@ -77,6 +80,7 @@
* @opensearch.internal
*/
public class GeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<Geometry, Geometry> {
private static final Logger logger = LogManager.getLogger(GeoShapeFieldMapper.class);
public static final String CONTENT_TYPE = "geo_shape";
public static final FieldType FIELD_TYPE = new FieldType();
static {
@@ -205,9 +209,24 @@ protected void addDocValuesFields(
final List<IndexableField> indexableFields,
final ParseContext context
) {
Field[] fieldsArray = new Field[indexableFields.size()];
fieldsArray = indexableFields.toArray(fieldsArray);
context.doc().add(LatLonShape.createDocValueField(name, fieldsArray));
/*
 * Add doc values for GeoShape fields only if the index was created on OpenSearch 2.9 or later.
 * Without this check, customers who upgrade from older versions cannot index documents with
 * GeoShape fields. GitHub issues: https://github.com/opensearch-project/OpenSearch/issues/10958,
 * https://github.com/opensearch-project/OpenSearch/issues/10795
 */
if (context.indexSettings().getIndexVersionCreated().onOrAfter(Version.V_2_9_0)) {
Field[] fieldsArray = new Field[indexableFields.size()];
fieldsArray = indexableFields.toArray(fieldsArray);
context.doc().add(LatLonShape.createDocValueField(name, fieldsArray));
} else {
logger.warn(
    "The index was created with version {}; geoshape doc values require an index created "
        + "with OpenSearch version {} or above",
    context.indexSettings().getIndexVersionCreated(),
    Version.V_2_9_0
);
}
}

@Override
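A compact restatement of the gating pattern above, as a hedged sketch; `GeoShapeDocValuesGate` is a hypothetical name for illustration, and only `org.opensearch.Version` comes from the commit:

```java
import org.opensearch.Version;

// Hypothetical helper illustrating the pattern: gate a new on-disk feature on
// the version that created the index, not the node's runtime version, so that
// segments written before the feature existed stay usable after an upgrade.
final class GeoShapeDocValuesGate {
    private GeoShapeDocValuesGate() {}

    static boolean enabled(Version indexCreatedVersion) {
        // Doc values for geo_shape were introduced in OpenSearch 2.9.
        return indexCreatedVersion.onOrAfter(Version.V_2_9_0);
    }
}
```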
@@ -32,6 +32,7 @@
import org.opensearch.indices.replication.common.ReplicationFailedException;
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.CorruptionUtils;
import org.opensearch.test.junit.annotations.TestLogging;
import org.hamcrest.MatcherAssert;
import org.junit.Assert;

@@ -297,31 +298,39 @@ public void testPrimaryRestart_PrimaryHasExtraCommits() throws Exception {
}
}

@TestLogging(reason = "Getting trace logs from replication package", value = "org.opensearch.indices.replication:TRACE")
public void testRepicaCleansUpOldCommitsWhenReceivingNew() throws Exception {
final Path remotePath = createTempDir();
try (ReplicationGroup shards = createGroup(1, getIndexSettings(), indexMapping, new NRTReplicationEngineFactory(), remotePath)) {
shards.startAll();
final IndexShard primary = shards.getPrimary();
final IndexShard replica = shards.getReplicas().get(0);
final Store store = replica.store();
final SegmentInfos initialCommit = store.readLastCommittedSegmentsInfo();
shards.indexDocs(1);
flushShard(primary);
replicateSegments(primary, shards.getReplicas());

assertDocCount(primary, 1);
assertDocCount(replica, 1);
assertEquals("segments_5", replica.store().readLastCommittedSegmentsInfo().getSegmentsFileName());
assertSingleSegmentFile(replica, "segments_5");
assertSingleSegmentFile(replica);
final SegmentInfos secondCommit = store.readLastCommittedSegmentsInfo();
assertTrue(secondCommit.getGeneration() > initialCommit.getGeneration());

shards.indexDocs(1);
primary.refresh("test");
replicateSegments(primary, shards.getReplicas());
assertDocCount(replica, 2);
assertSingleSegmentFile(replica, "segments_5");
assertSingleSegmentFile(replica);
assertEquals(store.readLastCommittedSegmentsInfo().getGeneration(), secondCommit.getGeneration());

shards.indexDocs(1);
flushShard(primary);
replicateSegments(primary, shards.getReplicas());
assertDocCount(replica, 3);
assertSingleSegmentFile(replica, "segments_6");
assertSingleSegmentFile(replica);
final SegmentInfos thirdCommit = store.readLastCommittedSegmentsInfo();
assertTrue(thirdCommit.getGeneration() > secondCommit.getGeneration());

final Store.RecoveryDiff diff = Store.segmentReplicationDiff(primary.getSegmentMetadataMap(), replica.getSegmentMetadataMap());
assertTrue(diff.missing.isEmpty());
@@ -571,11 +580,10 @@ protected void validateShardIdleWithNoReplicas(IndexShard primary) {
assertFalse(primary.hasRefreshPending());
}

private void assertSingleSegmentFile(IndexShard shard, String fileName) throws IOException {
private void assertSingleSegmentFile(IndexShard shard) throws IOException {
final Set<String> segmentsFileNames = Arrays.stream(shard.store().directory().listAll())
.filter(file -> file.startsWith(IndexFileNames.SEGMENTS))
.collect(Collectors.toSet());
assertEquals("Expected a single segment file", 1, segmentsFileNames.size());
assertEquals(segmentsFileNames.stream().findFirst().get(), fileName);
}
}
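The assertion changes above swap hard-coded segment file names for generation comparisons. A sketch of the stable check, assuming the Lucene `SegmentInfos` and `Store` APIs already used in this test:

```java
// Segment generations increase monotonically with each commit, so comparing
// generations is robust where asserting exact names like "segments_5" or
// "segments_6" was brittle across commit counts.
final SegmentInfos before = store.readLastCommittedSegmentsInfo();
// ... index, flush, and replicate ...
final SegmentInfos after = store.readLastCommittedSegmentsInfo();
assertTrue(after.getGeneration() > before.getGeneration());
```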
@@ -336,12 +336,17 @@ public void testMinScoreDisablesCountOptimization() throws Exception {
assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation);
assertProfileData(context, "MatchAllDocsQuery", query -> {
assertThat(query.getTimeBreakdown().keySet(), not(empty()));
assertThat(query.getTimeBreakdown().get("score"), greaterThanOrEqualTo(100L));
assertThat(query.getTimeBreakdown().get("score"), greaterThanOrEqualTo(1L));
assertThat(query.getTimeBreakdown().get("score_count"), equalTo(1L));
if (executor != null) {
assertThat(query.getTimeBreakdown().get("max_score"), greaterThanOrEqualTo(100L));
assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(100L));
assertThat(query.getTimeBreakdown().get("avg_score"), greaterThanOrEqualTo(100L));
long maxScore = query.getTimeBreakdown().get("max_score");
long minScore = query.getTimeBreakdown().get("min_score");
long avgScore = query.getTimeBreakdown().get("avg_score");
assertThat(maxScore, greaterThanOrEqualTo(1L));
assertThat(minScore, greaterThanOrEqualTo(1L));
assertThat(avgScore, greaterThanOrEqualTo(1L));
assertThat(maxScore, greaterThanOrEqualTo(avgScore));
assertThat(avgScore, greaterThanOrEqualTo(minScore));
assertThat(query.getTimeBreakdown().get("max_score_count"), equalTo(1L));
assertThat(query.getTimeBreakdown().get("min_score_count"), equalTo(1L));
assertThat(query.getTimeBreakdown().get("avg_score_count"), equalTo(1L));