Merge remote-tracking branch 'upstream/main' into setting-initial-master-node

Signed-off-by: Tianli Feng <[email protected]>
Tianli Feng committed Mar 15, 2022
2 parents cf22874 + 7df40ee commit 7d96d3f
Showing 36 changed files with 332 additions and 533 deletions.
2 changes: 1 addition & 1 deletion plugins/repository-azure/build.gradle
@@ -67,7 +67,7 @@ dependencies {
api "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${versions.jackson}"
api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}"
api 'org.codehaus.woodstox:stax2-api:4.2.1'
implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1'
implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8'
runtimeOnly 'com.google.guava:guava:31.1-jre'
api 'org.apache.commons:commons-lang3:3.12.0'
testImplementation project(':test:fixtures:azure-fixture')

This file was deleted.

@@ -0,0 +1 @@
670748292899c53b1963730d9eb7f8ab71314e90
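The file deleted above and the new one-line entry just shown appear to be SHA-1 checksum files tracking the bundled woodstox-core jar: bumping the dependency version in build.gradle drops the old checksum and commits a new one. As a rough, hypothetical illustration (the jar path below is not taken from this commit), such a checksum can be reproduced with standard JDK classes:

import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;

public class Sha1ChecksumSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical path; point this at the locally resolved woodstox-core 6.2.8 artifact.
        byte[] data = Files.readAllBytes(Paths.get("woodstox-core-6.2.8.jar"));
        byte[] digest = MessageDigest.getInstance("SHA-1").digest(data);
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b)); // lower-case hex, two digits per byte
        }
        System.out.println(hex); // 40-character value comparable to the committed checksum line
    }
}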
@@ -39,7 +39,7 @@
search:
rest_total_hits_as_int: true
index: "search_index"
body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } }
body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } }
- do:
indices.create:
index: lookup_index
@@ -64,7 +64,7 @@
search:
rest_total_hits_as_int: true
index: "search_index"
body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } }
body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } }

- match: { _shards.total: 5 }
- match: { _shards.successful: 5 }
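The two request bodies above drop the legacy "type" : "_doc" key from the terms lookup, in line with the removal of mapping types elsewhere in this commit. For orientation, here is a minimal sketch of building the equivalent typeless lookup query in Java; the field, index, id, and path values mirror the YAML test, and the sketch assumes the OpenSearch core classes are on the classpath:

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.TermsQueryBuilder;
import org.opensearch.indices.TermsLookup;

public class TermsLookupSketch {
    public static void main(String[] args) {
        // Typeless lookup: only the index, the document id, and the path are supplied.
        TermsQueryBuilder query = QueryBuilders.termsLookupQuery(
            "user",
            new TermsLookup("lookup_index", "1", "followers")
        );
        // TermsQueryBuilder#toString() renders the query as JSON, mirroring the request body above.
        System.out.println(query);
    }
}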
@@ -840,6 +840,11 @@ public void testTaskStoringSuccessfulResult() throws Exception {
GetTaskResponse getResponse = expectFinishedTask(taskId);
assertEquals(result, getResponse.getTask().getResponseAsMap());
assertNull(getResponse.getTask().getError());

// run it again to check that the tasks index has been successfully created and can be re-used
client().execute(TestTaskPlugin.TestTaskAction.INSTANCE, request).get();
events = findEvents(TestTaskPlugin.TestTaskAction.NAME, Tuple::v1);
assertEquals(2, events.size());
}

public void testTaskStoringFailureResult() throws Exception {
@@ -60,7 +60,6 @@
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.env.NodeEnvironment;
import org.opensearch.index.mapper.MapperParsingException;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.indices.IndexClosedException;
import org.opensearch.indices.ShardLimitValidator;
import org.opensearch.test.OpenSearchIntegTestCase;
@@ -123,9 +122,8 @@ public void testMappingMetadataParsed() throws Exception {
.getState()
.metadata()
.index("test")
.getMappings()
.get(MapperService.SINGLE_MAPPING_NAME);
assertThat(mappingMd.routing().required(), equalTo(true));
.mapping();
assertThat(mappingMd.routingRequired(), equalTo(true));

logger.info("--> restarting nodes...");
internalCluster().fullRestart();
@@ -134,17 +132,8 @@ public void testMappingMetadataParsed() throws Exception {
ensureYellow();

logger.info("--> verify meta _routing required exists");
mappingMd = client().admin()
.cluster()
.prepareState()
.execute()
.actionGet()
.getState()
.metadata()
.index("test")
.getMappings()
.get(MapperService.SINGLE_MAPPING_NAME);
assertThat(mappingMd.routing().required(), equalTo(true));
mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata().index("test").mapping();
assertThat(mappingMd.routingRequired(), equalTo(true));
}

public void testSimpleOpenClose() throws Exception {
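The assertions above switch from getMappings().get(MapperService.SINGLE_MAPPING_NAME) plus routing().required() to the typeless accessors IndexMetadata#mapping() and MappingMetadata#routingRequired(). A compact sketch of the new access pattern, written as a standalone helper (the ClusterState is assumed to come from a prepareState() call, as in the test):

import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.MappingMetadata;

public final class RoutingRequiredSketch {
    private RoutingRequiredSketch() {}

    /** True when the single mapping of the given index marks _routing as required. */
    static boolean routingRequired(ClusterState state, String index) {
        // IndexMetadata#mapping() returns the one MappingMetadata, or null if the index has no mapping.
        MappingMetadata mappingMd = state.metadata().index(index).mapping();
        return mappingMd != null && mappingMd.routingRequired();
    }
}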
@@ -153,11 +153,7 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception {

// make sure it was also written on red node although index is closed
ImmutableOpenMap<String, IndexMetadata> indicesMetadata = getIndicesMetadataOnNode(dataNode);
assertNotNull(
((Map<String, ?>) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get(
"integer_field"
)
);
assertNotNull(((Map<String, ?>) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("integer_field"));
assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE));

/* Try the same and see if this also works if node was just restarted.
@@ -190,9 +186,7 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception {

// make sure it was also written on red node although index is closed
indicesMetadata = getIndicesMetadataOnNode(dataNode);
assertNotNull(
((Map<String, ?>) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field")
);
assertNotNull(((Map<String, ?>) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("float_field"));
assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE));

// finally check that meta data is also written of index opened again
@@ -1195,75 +1195,63 @@ public void testTermsLookupFilter() throws Exception {
);

SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms")))
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms")))
.get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");

// same as above, just on the _id...
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms")))
.get();
searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms"))).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");

// another search with same parameters...
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms")))
.get();
searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms")))
.get();
searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))).get();
assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms")))
.get();
searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "4");

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms")))
.get();
searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))).get();
assertHitCount(searchResponse, 0L);

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term")))
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term")))
.get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term")))
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term")))
.get();
assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term")))
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term")))
.get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "4");

searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term")))
.setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term")))
.get();
assertHitCount(searchResponse, 0L);

// index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "missing", "terms")))
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "missing", "terms")))
.get();
assertHitCount(searchResponse, 0L);

// index "lookup3" type "type" has the source disabled: ignore the lookup terms
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "type", "1", "terms")))
.get();
searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "1", "terms"))).get();
assertHitCount(searchResponse, 0L);
}

@@ -62,6 +62,7 @@

import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
import static org.opensearch.index.query.QueryBuilders.queryStringQuery;
import static org.opensearch.index.query.QueryBuilders.rangeQuery;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.allOf;
@@ -490,7 +491,7 @@ public void testExplainTermsQueryWithLookup() throws Exception {
client().prepareIndex("twitter").setId("1").setSource("followers", new int[] { 1, 2, 3 }).get();
refresh();

TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "_doc", "1", "followers"));
TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "1", "followers"));
ValidateQueryResponse response = client().admin()
.indices()
.prepareValidateQuery("twitter")
@@ -500,4 +501,100 @@ public void testExplainTermsQueryWithLookup() throws Exception {
.actionGet();
assertThat(response.isValid(), is(true));
}

// Issue: https://github.com/opensearch-project/OpenSearch/issues/2036
public void testValidateDateRangeInQueryString() throws IOException {
assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)));

assertAcked(
client().admin()
.indices()
.preparePutMapping("test")
.setSource(
XContentFactory.jsonBuilder()
.startObject()
.startObject(MapperService.SINGLE_MAPPING_NAME)
.startObject("properties")
.startObject("name")
.field("type", "keyword")
.endObject()
.startObject("timestamp")
.field("type", "date")
.endObject()
.endObject()
.endObject()
.endObject()
)
);

client().prepareIndex("test").setId("1").setSource("name", "username", "timestamp", 200).get();
refresh();

ValidateQueryResponse response = client().admin()
.indices()
.prepareValidateQuery()
.setQuery(
QueryBuilders.boolQuery()
.must(rangeQuery("timestamp").gte(0).lte(100))
.must(queryStringQuery("username").allowLeadingWildcard(false))
)
.setRewrite(true)
.get();

assertNoFailures(response);
assertThat(response.isValid(), is(true));

// Use wildcard and date outside the range
response = client().admin()
.indices()
.prepareValidateQuery()
.setQuery(
QueryBuilders.boolQuery()
.must(rangeQuery("timestamp").gte(0).lte(100))
.must(queryStringQuery("*erna*").allowLeadingWildcard(false))
)
.setRewrite(true)
.get();

assertNoFailures(response);
assertThat(response.isValid(), is(false));

// Use wildcard and date inside the range
response = client().admin()
.indices()
.prepareValidateQuery()
.setQuery(
QueryBuilders.boolQuery()
.must(rangeQuery("timestamp").gte(0).lte(1000))
.must(queryStringQuery("*erna*").allowLeadingWildcard(false))
)
.setRewrite(true)
.get();

assertNoFailures(response);
assertThat(response.isValid(), is(false));

// Use wildcard and date inside the range (allow leading wildcard)
response = client().admin()
.indices()
.prepareValidateQuery()
.setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte(0).lte(1000)).must(queryStringQuery("*erna*")))
.setRewrite(true)
.get();

assertNoFailures(response);
assertThat(response.isValid(), is(true));

// Use invalid date range
response = client().admin()
.indices()
.prepareValidateQuery()
.setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte("aaa").lte(100)))
.setRewrite(true)
.get();

assertNoFailures(response);
assertThat(response.isValid(), is(false));

}
}
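The new testValidateDateRangeInQueryString test combines a date range query with a query_string query and validates it with setRewrite(true); the response comes back invalid when a leading wildcard is used while allowLeadingWildcard(false) is set, or when a range bound such as gte("aaa") cannot be parsed as a date. A minimal sketch of that client call pattern, extracted as a helper (index and field names follow the test; a Client connected to a running cluster is assumed):

import org.opensearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.opensearch.client.Client;
import org.opensearch.index.query.QueryBuilders;

public final class ValidateQuerySketch {
    private ValidateQuerySketch() {}

    /** Validates a bool query (date range + query_string) against the "test" index, with rewriting enabled. */
    static boolean isValid(Client client) {
        ValidateQueryResponse response = client.admin()
            .indices()
            .prepareValidateQuery("test")
            .setQuery(
                QueryBuilders.boolQuery()
                    .must(QueryBuilders.rangeQuery("timestamp").gte(0).lte(100))
                    .must(QueryBuilders.queryStringQuery("username").allowLeadingWildcard(false))
            )
            .setRewrite(true)
            .get();
        return response.isValid();
    }
}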
@@ -131,7 +131,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener
if (request.query() == null) {
rewriteListener.onResponse(request.query());
} else {
Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider), rewriteListener);
Rewriteable.rewriteAndFetch(request.query(), searchService.getValidationRewriteContext(timeProvider), rewriteListener);
}
}

@@ -225,7 +225,7 @@ protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest re
request.nowInMillis(),
request.filteringAliases()
);
SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT);
SearchContext searchContext = searchService.createValidationContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT);
try {
ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query());
searchContext.parsedQuery(parsedQuery);
@@ -615,7 +615,7 @@ public VersionType versionType() {
public void process(Version indexCreatedVersion, @Nullable MappingMetadata mappingMd, String concreteIndex) {
if (mappingMd != null) {
// might as well check for routing here
if (mappingMd.routing().required() && routing == null) {
if (mappingMd.routingRequired() && routing == null) {
throw new RoutingMissingException(concreteIndex, id);
}
}
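With routing().required() replaced by routingRequired(), the preprocessing above keeps its behaviour: if the mapping declares _routing as required and the request carries no routing value, a RoutingMissingException is thrown. A condensed, illustrative restatement of that check (extracted as a helper for clarity; not the actual IndexRequest code):

import org.opensearch.action.RoutingMissingException;
import org.opensearch.cluster.metadata.MappingMetadata;

public final class RoutingCheckSketch {
    private RoutingCheckSketch() {}

    /** Rejects the operation when the mapping requires _routing but none was supplied. */
    static void ensureRoutingPresent(MappingMetadata mappingMd, String routing, String concreteIndex, String id) {
        if (mappingMd != null && mappingMd.routingRequired() && routing == null) {
            throw new RoutingMissingException(concreteIndex, id);
        }
    }
}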