This repository has been archived by the owner on Aug 2, 2022. It is now read-only.

SQL release for Elasticsearch 7.10 #834

Merged
4 changes: 2 additions & 2 deletions build.gradle
@@ -15,7 +15,7 @@

buildscript {
ext {
es_version = "7.9.1"
es_version = "7.10.0"
}

repositories {
@@ -43,7 +43,7 @@ repositories {
}

ext {
opendistroVersion = '1.11.0'
opendistroVersion = '1.12.0'
isSnapshot = "true" == System.getProperty("build.snapshot", "true")
}

21 changes: 18 additions & 3 deletions doctest/build.gradle
@@ -1,12 +1,16 @@
import org.elasticsearch.gradle.testclusters.RunTask

plugins {
id "com.wiredforcode.spawn" version "0.8.2"
id 'base'
}

apply plugin: 'elasticsearch.testclusters'

def path = project(':').projectDir
// temporary fix, because currently we are under migration to new architecture. Need to run ./gradlew run from
// plugin module, and will only build ppl in it.
def plugin_path = project(':plugin').projectDir
def plugin_path = project(':doctest').projectDir

task bootstrap(type: Exec) {
inputs.file "$projectDir/bootstrap.sh"
@@ -17,7 +21,7 @@ task bootstrap(type: Exec) {

//evaluationDependsOn(':')
task startES(type: SpawnProcessTask) {
command "${path}/gradlew -p ${plugin_path} run"
command "${path}/gradlew -p ${plugin_path} runRestTestCluster"
ready 'started'
}

@@ -36,4 +40,15 @@ doctest.dependsOn startES
doctest.finalizedBy stopES

build.dependsOn doctest
clean.dependsOn(cleanBootstrap)
clean.dependsOn(cleanBootstrap)

testClusters {
docTestCluster {
plugin ':plugin'
testDistribution = 'oss'
}
}
tasks.register("runRestTestCluster", RunTask) {
description = 'Runs elasticsearch ODFE SQL plugin'
useCluster testClusters.docTestCluster;
}
ElasticsearchNodeClient.java
@@ -26,7 +26,6 @@
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import lombok.RequiredArgsConstructor;
import org.apache.logging.log4j.ThreadContext;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
@@ -39,7 +38,6 @@
import org.elasticsearch.threadpool.ThreadPool;

/** Elasticsearch connection by node client. */
@RequiredArgsConstructor
public class ElasticsearchNodeClient implements ElasticsearchClient {

/** Default types and field filter to match all. */
@@ -55,10 +53,20 @@ public class ElasticsearchNodeClient implements ElasticsearchClient {
private final NodeClient client;

/** Index name expression resolver to get concrete index name. */
private final IndexNameExpressionResolver resolver = new IndexNameExpressionResolver();
private final IndexNameExpressionResolver resolver;

private static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker";

/**
* Constructor of ElasticsearchNodeClient.
*/
public ElasticsearchNodeClient(ClusterService clusterService,
NodeClient client) {
this.clusterService = clusterService;
this.client = client;
this.resolver = new IndexNameExpressionResolver(client.threadPool().getThreadContext());
}

/**
* Get field mappings of index by an index expression. Majority is copied from legacy
* LocalClusterState.
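Note on the hunk above: with the move to Elasticsearch 7.10, the resolver is built from the node client's ThreadContext rather than via a no-argument IndexNameExpressionResolver(), so the field can no longer be initialized inline and Lombok's @RequiredArgsConstructor gives way to an explicit constructor. A minimal sketch of the resulting call site, with clusterService and nodeClient as illustrative placeholders for whatever the plugin module actually injects:

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.service.ClusterService;

class NodeClientWiringSketch {
  // Sketch only: the constructor derives its IndexNameExpressionResolver from
  // nodeClient.threadPool().getThreadContext(), so callers pass just these two.
  ElasticsearchNodeClient wire(ClusterService clusterService, NodeClient nodeClient) {
    return new ElasticsearchNodeClient(clusterService, nodeClient);
  }
}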
ElasticsearchNodeClientTest.java
@@ -55,6 +55,7 @@
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -84,6 +85,9 @@ class ElasticsearchNodeClientTest {
@Mock
private SearchHit searchHit;

@Mock
private ThreadContext threadContext;

private ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id",
new ExprIntegerValue(1)));

@@ -198,6 +202,7 @@ public void search() {
void schedule() {
ThreadPool threadPool = mock(ThreadPool.class);
when(nodeClient.threadPool()).thenReturn(threadPool);
when(threadPool.getThreadContext()).thenReturn(threadContext);

doAnswer(
invocation -> {
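In the test above, that constructor change means client.threadPool().getThreadContext() is called while the object is being built, so the thread pool and thread context must be stubbed before construction. A compact sketch of the setup, assuming Mockito mocks for ClusterService, NodeClient and ThreadContext analogous to the @Mock fields in this test class:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Inside a test method; clusterService, nodeClient and threadContext are assumed mocks.
ThreadPool threadPool = mock(ThreadPool.class);
when(nodeClient.threadPool()).thenReturn(threadPool);
when(threadPool.getThreadContext()).thenReturn(threadContext);

// Construction now succeeds because the resolver can obtain a ThreadContext.
ElasticsearchNodeClient client = new ElasticsearchNodeClient(clusterService, nodeClient);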
2 changes: 1 addition & 1 deletion gradle.properties
@@ -13,4 +13,4 @@
# permissions and limitations under the License.
#

version=1.11.0
version=1.12.0
2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
@@ -13,7 +13,7 @@
# permissions and limitations under the License.
#

distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
159 changes: 72 additions & 87 deletions integ-test/build.gradle
@@ -17,6 +17,7 @@ tasks.withType(licenseHeaders.class) {
}

validateNebulaPom.enabled = false
loggerUsageCheck.enabled = false

repositories {
mavenCentral()
@@ -63,14 +64,15 @@ compileTestJava {
}
}

tasks.integTest.dependsOn(':plugin:bundlePlugin', ':integ-test:integTestWithNewEngine')
testClusters.integTest {
testClusters.all {
testDistribution = 'oss'
plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile)
plugin ":plugin"
}

// Run only legacy SQL ITs with new SQL engine disabled
integTest.runner {
integTest {
dependsOn (':plugin:bundlePlugin',':integ-test:integTestWithNewEngine')

systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

@@ -95,112 +97,95 @@ integTest.runner {
// Run PPL ITs and new, legacy and comparison SQL ITs with new SQL engine enabled
task integTestWithNewEngine(type: RestIntegTestTask) {
dependsOn ':plugin:bundlePlugin'
runner {
systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

systemProperty "https", System.getProperty("https")
systemProperty "user", System.getProperty("user")
systemProperty "password", System.getProperty("password")
systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

// Enable new SQL engine
systemProperty 'enableNewEngine', 'true'
systemProperty "https", System.getProperty("https")
systemProperty "user", System.getProperty("user")
systemProperty "password", System.getProperty("password")

// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', getDebug() }
// Enable new SQL engine
systemProperty 'enableNewEngine', 'true'

if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', getDebug() }

exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**'
if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

// Explain IT is dependent on internal implementation of old engine so it's not necessary
// to run these with new engine and not necessary to make this consistent with old engine.
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/ExplainIT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatterIT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/TermQueryExplainIT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**'

// Skip old semantic analyzer IT because analyzer in new engine has different behavior
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.class'
// Explain IT is dependent on internal implementation of old engine so it's not necessary
// to run these with new engine and not necessary to make this consistent with old engine.
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/ExplainIT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatterIT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/TermQueryExplainIT.class'

// Skip this IT to avoid breaking tests due to inconsistency in JDBC schema
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/AggregationExpressionIT.class'
// Skip old semantic analyzer IT because analyzer in new engine has different behavior
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/QueryAnalysisIT.class'

// Skip this IT because all assertions are against explain output
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/OrderIT.class'
}
}
// Skip this IT to avoid breaking tests due to inconsistency in JDBC schema
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/AggregationExpressionIT.class'

testClusters.integTestWithNewEngine {
testDistribution = 'oss'
plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile)
// Skip this IT because all assertions are against explain output
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/OrderIT.class'
}




task docTest(type: RestIntegTestTask) {
dependsOn ':plugin:bundlePlugin'
runner {
systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', getDebug()}

if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

include 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/sql/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/**/*IT.class'

systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', getDebug()}

if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}
}

testClusters.docTest {
testDistribution = 'oss'
plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile)
include 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/sql/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/**/*IT.class'
}


task comparisonTest(type: RestIntegTestTask) {
dependsOn ':plugin:bundlePlugin'
runner {
systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', getDebug()}

if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

include 'com/amazon/opendistroforelasticsearch/sql/correctness/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/**/*IT.class'

// Enable logging output to console
testLogging.showStandardStreams true

// Pass down system properties to IT class
systemProperty "esHost", System.getProperty("esHost")
systemProperty "dbUrl", System.getProperty("dbUrl")
systemProperty "otherDbUrls", System.getProperty("otherDbUrls")
systemProperty "queries", System.getProperty("queries")

systemProperty 'tests.security.manager', 'false'
systemProperty('project.root', project.projectDir.absolutePath)

// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', getDebug()}

if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}
}

testClusters.comparisonTest {
testDistribution = 'oss'
plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile)
include 'com/amazon/opendistroforelasticsearch/sql/correctness/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class'
exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/**/*IT.class'

// Enable logging output to console
testLogging.showStandardStreams true

// Pass down system properties to IT class
systemProperty "esHost", System.getProperty("esHost")
systemProperty "dbUrl", System.getProperty("dbUrl")
systemProperty "otherDbUrls", System.getProperty("otherDbUrls")
systemProperty "queries", System.getProperty("queries")
}

task compileJdbc(type:Exec) {
2 changes: 1 addition & 1 deletion legacy/build.gradle
@@ -89,7 +89,7 @@ dependencies {

testCompile group: 'org.hamcrest', name: 'hamcrest-core', version:'2.2'
// testCompile group: 'com.alibaba', name: 'fastjson', version:'1.2.56'
testCompile group: 'org.mockito', name: 'mockito-core', version:'2.23.4'
testCompile group: 'org.mockito', name: 'mockito-core', version:'3.5.0'
testCompile group: 'junit', name: 'junit', version: '4.12'
testCompile group: "org.elasticsearch.client", name: 'transport', version: "${es_version}"

(changes to a separate Java source file)
@@ -48,8 +48,8 @@
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
@@ -586,7 +586,7 @@ private DateHistogramAggregationBuilder dateHistogram(MethodField field) throws
case "extended_bounds":
String[] bounds = value.split(":");
if (bounds.length == 2) {
dateHistogram.extendedBounds(new ExtendedBounds(bounds[0], bounds[1]));
dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1]));
}
break;

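The last hunk tracks another 7.10 rename: the extended bounds of a date histogram aggregation are now expressed with LongBounds instead of ExtendedBounds, taking the same min/max date strings. A standalone sketch of the new builder API (aggregation name, field, and bound values are made up):

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds;

class LongBoundsSketch {
  static DateHistogramAggregationBuilder overTime() {
    // LongBounds replaces ExtendedBounds in 7.10 and still accepts the
    // min/max strings that the legacy code passes as bounds[0] and bounds[1].
    return AggregationBuilders.dateHistogram("over_time")
        .field("timestamp")
        .calendarInterval(DateHistogramInterval.DAY)
        .extendedBounds(new LongBounds("2020-01-01", "2020-12-31"));
  }
}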