Support for Elasticsearch 7.0.1 (opendistro-for-elasticsearch#47)
penghuo committed May 10, 2019
1 parent 551d8d6 commit bc1ba1e
Showing 30 changed files with 189 additions and 142 deletions.
4 changes: 2 additions & 2 deletions build.gradle
@@ -16,7 +16,7 @@
buildscript {

ext {
-es_version = System.getProperty("es.version", "6.7.1")
+es_version = System.getProperty("es.version", "7.0.1")
}
// This isn't applying from repositories.gradle so repeating it here
repositories {
@@ -95,7 +95,7 @@ compileTestJava {
}

// TODO: Need to update integration test to use ElasticSearch test framework
-unitTest {
+test {
include '**/*Test.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/intgtest/**'
// Gradle runs unit test using a working directory other and project root
2 changes: 1 addition & 1 deletion gradle.properties
@@ -13,4 +13,4 @@
# permissions and limitations under the License.
#

-version=0.7
+version=1.0.0
18 changes: 2 additions & 16 deletions gradle/wrapper/gradle-wrapper.properties
@@ -1,20 +1,6 @@
-#
-# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License").
-# You may not use this file except in compliance with the License.
-# A copy of the License is located at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# or in the "license" file accompanying this file. This file is distributed
-# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-# express or implied. See the License for the specific language governing
-# permissions and limitations under the License.
-#

-#Thu May 02 16:03:13 PDT 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.3-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.3-all.zip
(file name not shown)
@@ -29,7 +29,7 @@
import java.util.Map;

import static java.util.stream.Collectors.joining;
-import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSelector;
+import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketSelector;

/**
* Domain object for HAVING clause in SQL which covers both the parsing and explain logic.
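
Note: in Elasticsearch 7.x the PipelineAggregatorBuilders factory sits directly under org.elasticsearch.search.aggregations, so only the static import changes; the bucket_selector call itself is unchanged. A minimal sketch (not part of this commit; the aggregation name, buckets path, and script are illustrative) of the kind of pipeline aggregation a HAVING clause translates to:

import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketSelector;

import java.util.Collections;
import java.util.Map;

import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;

class HavingExample {
    // Roughly what "HAVING avg(age) > 30" becomes: keep only buckets whose
    // avg_age sub-aggregation value satisfies the script condition.
    static PipelineAggregationBuilder havingFilter() {
        Map<String, String> bucketsPath = Collections.singletonMap("avg_age", "avg_age");
        return bucketSelector("having", bucketsPath, new Script("params.avg_age > 30"));
    }
}
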
(file name not shown)
@@ -21,6 +21,7 @@
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Optional;

import com.amazon.opendistroforelasticsearch.sql.exception.SqlParseException;
import org.elasticsearch.action.search.SearchResponse;
@@ -34,8 +35,8 @@
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
-import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
-import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
+import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
+import org.elasticsearch.search.aggregations.metrics.InternalValueCount;

public class SearchResult {
/**
@@ -49,7 +50,7 @@ public class SearchResult {

public SearchResult(SearchResponse resp) {
SearchHits hits = resp.getHits();
-this.total = hits.getTotalHits();
+this.total = Optional.ofNullable(hits.getTotalHits()).map(totalHits -> totalHits.value).orElse(0L);
results = new ArrayList<>(hits.getHits().length);
for (SearchHit searchHit : hits.getHits()) {
if (searchHit.getSourceAsMap() != null) {
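
Note: SearchHits#getTotalHits() returned a plain long in 6.x; in 7.x it returns Lucene's TotalHits object, which may be null when total-hit tracking is disabled, hence the Optional unwrapping above. A standalone sketch of the same pattern (the helper name is illustrative, not from this commit):

import java.util.Optional;

import org.elasticsearch.search.SearchHits;

class TotalHitsHelper {
    // Unwrap the 7.x TotalHits object into a plain count, defaulting to 0
    // when total-hit tracking was disabled for the search.
    static long count(SearchHits hits) {
        return Optional.ofNullable(hits.getTotalHits())
                .map(totalHits -> totalHits.value)
                .orElse(0L);
    }
}
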
(file name not shown)
@@ -24,12 +24,12 @@
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
-import org.elasticsearch.search.aggregations.metrics.stats.Stats;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
-import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
+import org.elasticsearch.search.aggregations.metrics.GeoBounds;
+import org.elasticsearch.search.aggregations.metrics.Percentile;
+import org.elasticsearch.search.aggregations.metrics.Percentiles;
+import org.elasticsearch.search.aggregations.metrics.Stats;
+import org.elasticsearch.search.aggregations.metrics.ExtendedStats;
+import org.elasticsearch.search.aggregations.metrics.TopHits;
import com.amazon.opendistroforelasticsearch.sql.utils.Util;

import java.util.*;
(file name not shown)
@@ -38,6 +38,7 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;

import static org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
@@ -430,7 +431,7 @@ private void extractData() {
SearchHits searchHits = (SearchHits) queryResult;

this.size = searchHits.getHits().length;
-this.totalHits = searchHits.totalHits;
+this.totalHits = Optional.ofNullable(searchHits.getTotalHits()).map(th -> th.value).orElse(0L);
this.rows = populateRows(searchHits);

} else if (queryResult instanceof Aggregations) {
(file name not shown)
@@ -15,13 +15,19 @@

package com.amazon.opendistroforelasticsearch.sql.executor.join;

+import com.amazon.opendistroforelasticsearch.sql.domain.Field;
+import com.amazon.opendistroforelasticsearch.sql.exception.SqlParseException;
+import com.amazon.opendistroforelasticsearch.sql.executor.ElasticHitsExecutor;
+import com.amazon.opendistroforelasticsearch.sql.query.SqlElasticRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.join.HashJoinElasticRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.join.JoinRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.join.NestedLoopsElasticRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.join.TableInJoinRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.planner.HashJoinQueryPlanRequestBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
@@ -35,10 +41,6 @@
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
-import com.amazon.opendistroforelasticsearch.sql.domain.Field;
-import com.amazon.opendistroforelasticsearch.sql.exception.SqlParseException;
-import com.amazon.opendistroforelasticsearch.sql.query.SqlElasticRequestBuilder;
-import com.amazon.opendistroforelasticsearch.sql.query.planner.HashJoinQueryPlanRequestBuilder;

import java.io.IOException;
import java.util.Collection;
@@ -102,7 +104,7 @@ public void run() throws IOException, SqlParseException {
protected abstract List<SearchHit> innerRun() throws IOException, SqlParseException ;

public SearchHits getHits(){
-return new SearchHits(results.toArray(new SearchHit[results.size()]), results.size(), 1.0f);
+return new SearchHits(results.toArray(new SearchHit[results.size()]), new TotalHits(results.size(), Relation.EQUAL_TO), 1.0f);
}

public static ElasticJoinExecutor createJoinExecutor(Client client, SqlElasticRequestBuilder requestBuilder){
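
Note: constructing a SearchHits changed in the same way; the 7.x constructor takes a TotalHits (count plus relation) instead of a raw count. A minimal sketch (not from this commit) of building one from an in-memory hit list, as the join/minus/union executors do:

import java.util.List;

import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

class SearchHitsFactory {
    // The executor knows the exact number of hits it collected, so the relation
    // is EQUAL_TO; Elasticsearch uses GREATER_THAN_OR_EQUAL_TO for capped counts.
    static SearchHits fromList(List<SearchHit> hits) {
        return new SearchHits(
                hits.toArray(new SearchHit[0]),
                new TotalHits(hits.size(), Relation.EQUAL_TO),
                1.0f);
    }
}
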
(file name not shown)
@@ -15,7 +15,10 @@

package com.amazon.opendistroforelasticsearch.sql.executor.join;

+import com.amazon.opendistroforelasticsearch.sql.domain.Select;
+import com.amazon.opendistroforelasticsearch.sql.query.join.BackOffRetryStrategy;
+import com.google.common.collect.ImmutableMap;
+import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
@@ -30,12 +33,11 @@
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
-import com.amazon.opendistroforelasticsearch.sql.domain.Select;
-import com.amazon.opendistroforelasticsearch.sql.query.join.BackOffRetryStrategy;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
+import java.util.Optional;

import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;

@@ -60,11 +62,11 @@ public static SearchResponse scrollOneTimeWithHits(Client client, SearchRequestB
}


-//use our deserializer instead of results toXcontent because the source field is differnet from sourceAsMap.
+//use our deserializer instead of results toXcontent because the source field is different from sourceAsMap.
public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) throws IOException {
if(results == null) return null;
Object[] searchHits;
-searchHits = new Object[(int) results.getTotalHits()];
+searchHits = new Object[Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()];
int i = 0;
for(SearchHit hit : results) {
HashMap<String,Object> value = new HashMap<>();
@@ -76,7 +78,10 @@ public static String hitsAsStringResult(SearchHits results, MetaSearchResult met
i++;
}
HashMap<String,Object> hits = new HashMap<>();
-hits.put("total",results.getTotalHits());
+hits.put("total", ImmutableMap.of(
+"value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L),
+"relation", Optional.ofNullable(results.getTotalHits()).map(th -> th.relation).orElse(Relation.EQUAL_TO)
+));
hits.put("max_score",results.getMaxScore());
hits.put("hits",searchHits);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
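
Note: the map built above mirrors the 7.x REST response, where hits.total became an object with value and relation because Elasticsearch 7 stops counting hits past track_total_hits (10,000 by default). A hedged sketch (the method name is illustrative) of how a caller might interpret that relation:

import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.action.search.SearchResponse;

class TotalHitsInterpreter {
    // Render the count the way the 7.x REST layer does: an exact number, or a
    // lower bound ("10000+") when counting was cut off at track_total_hits.
    static String describe(SearchResponse response) {
        TotalHits total = response.getHits().getTotalHits();
        if (total == null) {
            return "unknown (hit tracking disabled)";
        }
        return total.relation == Relation.GREATER_THAN_OR_EQUAL_TO
                ? total.value + "+"
                : String.valueOf(total.value);
    }
}
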
(file name not shown)
@@ -16,7 +16,16 @@
package com.amazon.opendistroforelasticsearch.sql.executor.join;

import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource;
+import com.amazon.opendistroforelasticsearch.sql.domain.Condition;
+import com.amazon.opendistroforelasticsearch.sql.domain.Select;
+import com.amazon.opendistroforelasticsearch.sql.domain.Where;
+import com.amazon.opendistroforelasticsearch.sql.esdomain.ESClient;
+import com.amazon.opendistroforelasticsearch.sql.exception.SqlParseException;
+import com.amazon.opendistroforelasticsearch.sql.query.DefaultQueryAction;
+import com.amazon.opendistroforelasticsearch.sql.query.join.BackOffRetryStrategy;
+import com.amazon.opendistroforelasticsearch.sql.query.join.NestedLoopsElasticRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.join.TableInJoinRequestBuilder;
+import com.amazon.opendistroforelasticsearch.sql.query.maker.Maker;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.MultiSearchRequest;
@@ -28,15 +37,6 @@
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
-import com.amazon.opendistroforelasticsearch.sql.domain.Condition;
-import com.amazon.opendistroforelasticsearch.sql.domain.Select;
-import com.amazon.opendistroforelasticsearch.sql.domain.Where;
-import com.amazon.opendistroforelasticsearch.sql.exception.SqlParseException;
-import com.amazon.opendistroforelasticsearch.sql.query.DefaultQueryAction;
-import com.amazon.opendistroforelasticsearch.sql.query.join.BackOffRetryStrategy;
-import com.amazon.opendistroforelasticsearch.sql.query.join.NestedLoopsElasticRequestBuilder;
-import com.amazon.opendistroforelasticsearch.sql.query.join.TableInJoinRequestBuilder;
-import com.amazon.opendistroforelasticsearch.sql.query.maker.Maker;

import java.util.ArrayList;
import java.util.List;
@@ -224,7 +224,7 @@ private FetchWithScrollResponse firstFetch(TableInJoinRequestBuilder tableReques
else {
//scroll request with max.
responseWithHits = scrollOneTimeWithMax(client,tableRequest);
-if(responseWithHits.getHits().getTotalHits() < MAX_RESULTS_ON_ONE_FETCH)
+if(responseWithHits.getHits().getTotalHits() != null && responseWithHits.getHits().getTotalHits().value < MAX_RESULTS_ON_ONE_FETCH)
needScrollForFirstTable = true;
}

(file name not shown)
@@ -17,6 +17,8 @@

import com.amazon.opendistroforelasticsearch.sql.executor.ElasticHitsExecutor;
import com.amazon.opendistroforelasticsearch.sql.executor.join.ElasticUtils;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.document.DocumentField;
@@ -136,7 +138,7 @@ private void fillMinusHitsFromOneField(String fieldName, Set<Object> fieldValues
}
int totalSize = currentId - 1;
SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]);
-this.minusHits = new SearchHits(unionHitsArr, totalSize,1.0f);
+this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f);
}

private void fillMinusHitsFromResults(Set<ComperableHitResult> comperableHitResults) {
@@ -164,7 +166,7 @@ private void fillMinusHitsFromResults(Set<ComperableHitResult> comperableHitResu
}
int totalSize = currentId - 1;
SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]);
-this.minusHits = new SearchHits(unionHitsArr, totalSize,1.0f);
+this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f);
}

private Set<ComperableHitResult> runWithScrollings() {
(file name not shown)
@@ -16,6 +16,8 @@
package com.amazon.opendistroforelasticsearch.sql.executor.multi;

import com.amazon.opendistroforelasticsearch.sql.executor.ElasticHitsExecutor;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.text.Text;
@@ -54,7 +56,7 @@ public void run() throws IOException, SqlParseException {
fillInternalSearchHits(unionHits,secondResponse.getHits().getHits(),this.multiQueryBuilder.getSecondTableFieldToAlias());
int totalSize = unionHits.size();
SearchHit[] unionHitsArr = unionHits.toArray(new SearchHit[totalSize]);
-this.results = new SearchHits(unionHitsArr, totalSize,1.0f);
+this.results = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO),1.0f);
}

private void fillInternalSearchHits(List<SearchHit> unionHits, SearchHit[] hits, Map<String, String> fieldNameToAlias) {
(file name not shown)
@@ -49,13 +49,12 @@
import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.sort.SortOrder;
-import org.joda.time.DateTimeZone;
import com.amazon.opendistroforelasticsearch.sql.utils.Util;
import com.amazon.opendistroforelasticsearch.sql.parser.ChildrenType;
import com.amazon.opendistroforelasticsearch.sql.parser.NestedType;
@@ -473,7 +472,7 @@ private ValuesSourceAggregationBuilder dateRange(MethodField field) {
dateRange.format(value);
continue;
} else if ("time_zone".equals(kv.key)) {
-dateRange.timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone(ZoneOffset.of(value))));
+dateRange.timeZone(ZoneOffset.of(value));
continue;
} else if ("from".equals(kv.key)) {
dateRange.addUnboundedFrom(kv.value.toString());
@@ -523,7 +522,7 @@ private DateHistogramAggregationBuilder dateHistogram(MethodField field) throws
dateHistogram.format(value);
break;
case "time_zone":
-dateHistogram.timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone(ZoneOffset.of(value))));
+dateHistogram.timeZone(ZoneOffset.of(value));
break;
case "min_doc_count":
dateHistogram.minDocCount(Long.parseLong(value));
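
Note: Elasticsearch 7 switched the aggregation builders from Joda's DateTimeZone to java.time.ZoneId, of which ZoneOffset is a subtype, so the parsed time_zone value can be passed straight through. A minimal sketch (not from this commit; the field name and offset are illustrative):

import java.time.ZoneOffset;

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

class TimeZoneExample {
    // Bucket a date field by day in a fixed-offset time zone; timeZone() now
    // accepts any java.time.ZoneId, e.g. ZoneOffset.of("-08:00") or ZoneId.of("UTC").
    static DateHistogramAggregationBuilder byDay(String dateField) {
        return AggregationBuilders.dateHistogram("by_day")
                .field(dateField)
                .dateHistogramInterval(DateHistogramInterval.DAY)
                .timeZone(ZoneOffset.of("-08:00"));
    }
}
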
