diff --git a/.github/draft-release-notes-config.yml b/.github/draft-release-notes-config.yml new file mode 100644 index 0000000000..8810497fef --- /dev/null +++ b/.github/draft-release-notes-config.yml @@ -0,0 +1,42 @@ +# The overall template of the release notes +template: | + Open Distro for Elasticsearch Version $RESOLVED_VERSION + $CHANGES + +# Setting the formatting and sorting for the release notes body +name-template: Version $RESOLVED_VERSION +change-template: "- $TITLE (PR [#$NUMBER](https://github.com/opendistro-for-elasticsearch/sql/pull/$NUMBER))" +sort-by: merged_at +sort-direction: ascending + +# Organizing the tagged PRs into categories +categories: + - title: "Version Upgrades" + labels: + - "version compatibility" + - title: "SQL and PPL" + labels: + - "SQL" + - "PPL" + - title: "SQL CLI" + labels: + - "CLI" + - title: "SQL JDBC" + labels: + - "JDBC" + - title: "SQL ODBC" + labels: + - "ODBC" + - title: "SQL Workbench" + labels: + - "Workbench" + - title: "Enhancements" + labels: + - "enhancement" + - "maintenance" + - title: "Bug Fixes" + labels: + - "bug" + - title: "Documentation" + labels: + - "documentation" diff --git a/.github/workflows/draft-release-notes-workflow.yml b/.github/workflows/draft-release-notes-workflow.yml new file mode 100644 index 0000000000..200d6a8569 --- /dev/null +++ b/.github/workflows/draft-release-notes-workflow.yml @@ -0,0 +1,21 @@ +name: Release Drafter + +on: + push: + branches: + - master + +jobs: + update_release_draft: + name: Update draft release notes + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - name: Update draft release notes + uses: release-drafter/release-drafter@v5 + with: + config-name: draft-release-notes-config.yml + tag: (None) + version: 1.9.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/sql-odbc-main.yml b/.github/workflows/sql-odbc-main.yml index e2e832e0fb..41ca029465 100644 --- 
a/.github/workflows/sql-odbc-main.yml +++ b/.github/workflows/sql-odbc-main.yml @@ -26,26 +26,16 @@ jobs: brew install curl brew install cmake brew install libiodbc - - name: aws-sdk-cpp-setup - run: | - sh aws_sdk_cpp_setup.sh - - name: configure - run: | - prefix_path=$(pwd) - mkdir cmake-build - cd cmake-build - cmake ../src -DCMAKE_INSTALL_PREFIX=$prefix_path/AWSSDK/ -DCMAKE_BUILD_TYPE=Release -DBUILD_ONLY="core" -DCUSTOM_MEMORY_MANAGEMENT="OFF" -DENABLE_RTTI="OFF" -DENABLE_TESTING="OFF" - cd .. - - name: build-driver + - name: configure-and-build-driver run: | - cmake --build cmake-build + ./build_mac_release64.sh #- name: test # run: | # bash ./run_test_runner.sh - name: build-installer if: success() run: | - cd cmake-build + cd cmake-build64 cmake ../src make cpack . @@ -59,7 +49,7 @@ jobs: cp ./lib64/*.dylib build cp ./lib64/*.a build cp $(ls -d bin64/* | grep -v "\.") build - cp ./cmake-build/*.pkg installer + cp ./cmake-build64/*.pkg installer # cp ./bin64/*.html test-output # cp ./bin64/*.log test-output - name: upload-build diff --git a/README.md b/README.md index dacd3bee47..12228c473f 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,17 @@ Open Distro for Elasticsearch enables you to extract insights out of Elasticsearch using the familiar SQL query syntax. Use aggregations, group by, and where clauses to investigate your data. Read your data as JSON documents or CSV tables so you have the flexibility to use the format that works best for you. + +## SQL Related Projects + +The following projects have been merged into this repository as separate folders as of July 9, 2020. Please refer to links below for details. This document will focus on the SQL plugin for Elasticsearch. 
+ +* [SQL CLI](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-cli) +* [SQL JDBC](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-jdbc) +* [SQL ODBC](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc) +* [SQL Workbench](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-workbench) + + ## Documentation Please refer to the [reference manual](./docs/user/index.rst) and [technical documentation](https://opendistro.github.io/for-elasticsearch-docs) for detailed information on installing and configuring opendistro-elasticsearch-sql plugin. Looking to contribute? Read the instructions on [Development Guide](./docs/developing.rst) and then submit a patch! diff --git a/build.gradle b/build.gradle index 2dcf9726a1..f45ea2b1a7 100644 --- a/build.gradle +++ b/build.gradle @@ -48,7 +48,7 @@ ext { } allprojects { - version = "${opendistroVersion}.0" + version = "${opendistroVersion}.1" plugins.withId('java') { sourceCompatibility = targetCompatibility = "1.8" diff --git a/common/build.gradle b/common/build.gradle index 0849fae080..2b3f09a883 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -9,7 +9,8 @@ repositories { dependencies { compile "org.antlr:antlr4-runtime:4.7.1" - compile group: 'com.google.guava', name: 'guava', version: '23.0' + // https://github.com/google/guava/wiki/CVE-2018-10237 + compile group: 'com.google.guava', name: 'guava', version: '29.0-jre' testCompile group: 'junit', name: 'junit', version: '4.12' } diff --git a/core/build.gradle b/core/build.gradle index acea197aec..53a66c1c10 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -9,7 +9,8 @@ repositories { } dependencies { - compile group: 'com.google.guava', name: 'guava', version: '23.0' + // https://github.com/google/guava/wiki/CVE-2018-10237 + compile group: 'com.google.guava', name: 'guava', version: '29.0-jre' compile group: 'org.springframework', name: 'spring-context', version: '5.2.5.RELEASE' 
compile group: 'org.springframework', name: 'spring-beans', version: '5.2.5.RELEASE' compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.10' diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java index dd1081bf52..2f9710b7b6 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java @@ -78,7 +78,8 @@ public Expression visitEqualTo(EqualTo node, AnalysisContext context) { @Override public Expression visitLiteral(Literal node, AnalysisContext context) { - return DSL.literal(ExprValueUtils.fromObjectValue(node.getValue())); + return DSL + .literal(ExprValueUtils.fromObjectValue(node.getValue(), node.getType().getCoreType())); } @Override diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java index b5096b4e60..4c62c119d4 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java @@ -123,6 +123,18 @@ public static Literal intLiteral(Integer value) { return literal(value, DataType.INTEGER); } + public static Literal dateLiteral(String value) { + return literal(value, DataType.DATE); + } + + public static Literal timeLiteral(String value) { + return literal(value, DataType.TIME); + } + + public static Literal timestampLiteral(String value) { + return literal(value, DataType.TIMESTAMP); + } + public static Literal doubleLiteral(Double value) { return literal(value, DataType.DOUBLE); } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java 
b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java index 51662e6c53..4befa380c0 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java @@ -15,12 +15,28 @@ package com.amazon.opendistroforelasticsearch.sql.ast.expression; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +/** + * The DataType defintion in AST. + * Question, could we use {@link ExprCoreType} directly in AST? + */ +@RequiredArgsConstructor public enum DataType { - TYPE_ERROR, - NULL, + TYPE_ERROR(ExprCoreType.UNKNOWN), + NULL(ExprCoreType.UNKNOWN), + + INTEGER(ExprCoreType.INTEGER), + DOUBLE(ExprCoreType.DOUBLE), + STRING(ExprCoreType.STRING), + BOOLEAN(ExprCoreType.BOOLEAN), + + DATE(ExprCoreType.DATE), + TIME(ExprCoreType.TIME), + TIMESTAMP(ExprCoreType.TIMESTAMP); - INTEGER, - DOUBLE, - STRING, - BOOLEAN + @Getter + private final ExprCoreType coreType; } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java new file mode 100644 index 0000000000..e5214aa8a5 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprNumberValue.java @@ -0,0 +1,54 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import com.google.common.base.Objects; +import lombok.RequiredArgsConstructor; + +/** + * Expression Number Value. + */ +@RequiredArgsConstructor +public abstract class AbstractExprNumberValue extends AbstractExprValue { + private final Number value; + + @Override + public Integer integerValue() { + return value.intValue(); + } + + @Override + public Long longValue() { + return value.longValue(); + } + + @Override + public Float floatValue() { + return value.floatValue(); + } + + @Override + public Double doubleValue() { + return value.doubleValue(); + } + + @Override + public int hashCode() { + return Objects.hashCode(value); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java new file mode 100644 index 0000000000..1e6e51a336 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/AbstractExprValue.java @@ -0,0 +1,80 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; + +/** + * Abstract ExprValue. + */ +public abstract class AbstractExprValue implements ExprValue { + /** + * The customize compareTo logic. + */ + @Override + public int compareTo(ExprValue other) { + if (this.isNull() || this.isMissing()) { + return this.compare(other); + } else if (other.isNull() || other.isMissing()) { + return -other.compareTo(this); + } + if (!this.type().equals(other.type())) { + throw new ExpressionEvaluationException( + String.format( + "compare expected value have same type, but with [%s, %s]", + this.type(), other.type())); + } + return compare(other); + } + + /** + * The customize equals logic. + * The table below list the NULL and MISSING handling logic. + * A B A == B + * NULL NULL TRUE + * NULL MISSING FALSE + * MISSING NULL FALSE + * MISSING MISSING TRUE + */ + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (!(o instanceof ExprValue)) { + return false; + } + ExprValue other = (ExprValue) o; + if (this.isNull() || this.isMissing()) { + return equal(other); + } else if (other.isNull() || other.isMissing()) { + return other.equals(this); + } else { + return equal(other); + } + } + + /** + * The expression value compare. + */ + public abstract int compare(ExprValue other); + + /** + * The expression value equal. 
+ */ + public abstract boolean equal(ExprValue other); +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprBooleanValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprBooleanValue.java index 4d2abeb09a..dd0e0805fc 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprBooleanValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprBooleanValue.java @@ -16,12 +16,16 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.google.common.base.Objects; import lombok.EqualsAndHashCode; -@EqualsAndHashCode -public class ExprBooleanValue implements ExprValue { - private static final ExprValue TRUE = new ExprBooleanValue(true); - private static final ExprValue FALSE = new ExprBooleanValue(false); +/** + * Expression Boolean Value. + */ +public class ExprBooleanValue extends AbstractExprValue { + private static final ExprBooleanValue TRUE = new ExprBooleanValue(true); + private static final ExprBooleanValue FALSE = new ExprBooleanValue(false); private final Boolean value; @@ -29,12 +33,8 @@ private ExprBooleanValue(Boolean value) { this.value = value; } - public static ExprValue ofTrue() { - return TRUE; - } - - public static ExprValue ofFalse() { - return FALSE; + public static ExprBooleanValue of(Boolean value) { + return value ? 
TRUE : FALSE; } @Override @@ -43,12 +43,32 @@ public Object value() { } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.BOOLEAN; } + @Override + public Boolean booleanValue() { + return value; + } + @Override public String toString() { return value.toString(); } + + @Override + public int compare(ExprValue other) { + return Boolean.compare(value, other.booleanValue()); + } + + @Override + public boolean equal(ExprValue other) { + return value.equals(other.booleanValue()); + } + + @Override + public int hashCode() { + return Objects.hashCode(value); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValue.java index aa4053cef5..4d71e260e2 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValue.java @@ -16,14 +16,20 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.google.common.base.Objects; +import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.RequiredArgsConstructor; -@EqualsAndHashCode +/** + * Expression Collection Value. 
+ */ @RequiredArgsConstructor -public class ExprCollectionValue implements ExprValue { +public class ExprCollectionValue extends AbstractExprValue { private final List valueList; @Override @@ -32,14 +38,52 @@ public Object value() { } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.ARRAY; } + @Override + public List collectionValue() { + return valueList; + } + @Override public String toString() { return valueList.stream() .map(Object::toString) .collect(Collectors.joining(",", "[", "]")); } + + @Override + public boolean equal(ExprValue o) { + if (!(o instanceof ExprCollectionValue)) { + return false; + } else { + ExprCollectionValue other = (ExprCollectionValue) o; + Iterator thisIterator = this.valueList.iterator(); + Iterator otherIterator = other.valueList.iterator(); + + while (thisIterator.hasNext() && otherIterator.hasNext()) { + ExprValue thisEntry = thisIterator.next(); + ExprValue otherEntry = otherIterator.next(); + if (!thisEntry.equals(otherEntry)) { + return false; + } + } + return !(thisIterator.hasNext() || otherIterator.hasNext()); + } + } + + /** + * Only compare the size of the list. + */ + @Override + public int compare(ExprValue other) { + return Integer.compare(valueList.size(), other.collectionValue().size()); + } + + @Override + public int hashCode() { + return Objects.hashCode(valueList); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDateValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDateValue.java new file mode 100644 index 0000000000..52394dcd72 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDateValue.java @@ -0,0 +1,90 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; +import com.google.common.base.Objects; +import java.time.Instant; +import java.time.LocalDate; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import lombok.RequiredArgsConstructor; + +/** + * Expression Date Value. + */ +@RequiredArgsConstructor +public class ExprDateValue extends AbstractExprValue { + /** + * todo. only support UTC now. + */ + private static final ZoneId ZONE = ZoneId.of("UTC"); + private final Instant date; + + /** + * Constructor of ExprDateValue. 
+ */ + public ExprDateValue(String date) { + try { + LocalDate localDate = LocalDate.parse(date); + this.date = localDate.atStartOfDay(ZONE).toInstant(); + } catch (DateTimeParseException e) { + throw new SemanticCheckException(String.format("date:%s in unsupported format, please use " + + "yyyy-MM-dd", date)); + } + } + + @Override + public String value() { + return DateTimeFormatter.ISO_LOCAL_DATE.withZone(ZONE).format(date); + } + + @Override + public ExprType type() { + return ExprCoreType.DATE; + } + + @Override + public ZonedDateTime dateValue() { + return date.atZone(ZONE); + } + + @Override + public String toString() { + return String.format("DATE '%s'", value()); + } + + @Override + public int compare(ExprValue other) { + return date.compareTo(other.dateValue().toInstant()); + } + + @Override + public boolean equal(ExprValue other) { + return date.atZone(ZONE).equals(other.dateValue()); + } + + @Override + public int hashCode() { + return Objects.hashCode(date); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDoubleValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDoubleValue.java index c464cbcfe3..a6e4347123 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDoubleValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDoubleValue.java @@ -16,26 +16,39 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -@EqualsAndHashCode -@RequiredArgsConstructor -public class ExprDoubleValue implements ExprValue { - private final Double value; +/** + * Expression Double Value. 
+ */ +public class ExprDoubleValue extends AbstractExprNumberValue { + + public ExprDoubleValue(Number value) { + super(value); + } @Override public Object value() { - return value; + return doubleValue(); } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.DOUBLE; } @Override public String toString() { - return value.toString(); + return doubleValue().toString(); + } + + @Override + public int compare(ExprValue other) { + return Double.compare(doubleValue(), other.doubleValue()); + } + + @Override + public boolean equal(ExprValue other) { + return doubleValue().equals(other.doubleValue()); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprFloatValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprFloatValue.java index 37c44058f0..7d364fe6e0 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprFloatValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprFloatValue.java @@ -16,26 +16,39 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -@EqualsAndHashCode -@RequiredArgsConstructor -public class ExprFloatValue implements ExprValue { - private final Float value; +/** + * Expression Float Value. 
+ */ +public class ExprFloatValue extends AbstractExprNumberValue { + + public ExprFloatValue(Number value) { + super(value); + } @Override public Object value() { - return value; + return floatValue(); } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.FLOAT; } @Override public String toString() { - return value.toString(); + return floatValue().toString(); + } + + @Override + public int compare(ExprValue other) { + return Float.compare(floatValue(), other.floatValue()); + } + + @Override + public boolean equal(ExprValue other) { + return floatValue().equals(other.floatValue()); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprIntegerValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprIntegerValue.java index 19d5462e78..e600196af2 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprIntegerValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprIntegerValue.java @@ -16,26 +16,39 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -@EqualsAndHashCode -@RequiredArgsConstructor -public class ExprIntegerValue implements ExprValue { - private final Integer value; +/** + * Expression Integer Value. 
+ */ +public class ExprIntegerValue extends AbstractExprNumberValue { + + public ExprIntegerValue(Number value) { + super(value); + } @Override public Object value() { - return value; + return integerValue(); } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.INTEGER; } @Override public String toString() { - return value.toString(); + return integerValue().toString(); + } + + @Override + public int compare(ExprValue other) { + return Integer.compare(integerValue(), other.integerValue()); + } + + @Override + public boolean equal(ExprValue other) { + return integerValue().equals(other.integerValue()); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprLongValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprLongValue.java index 730a7c406e..0191376a10 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprLongValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprLongValue.java @@ -16,26 +16,40 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.google.common.base.Objects; -@EqualsAndHashCode -@RequiredArgsConstructor -public class ExprLongValue implements ExprValue { - private final Long value; +/** + * Expression Long Value. 
+ */ +public class ExprLongValue extends AbstractExprNumberValue { + + public ExprLongValue(Number value) { + super(value); + } @Override public Object value() { - return value; + return longValue(); } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.LONG; } @Override public String toString() { - return value.toString(); + return longValue().toString(); + } + + @Override + public int compare(ExprValue other) { + return Long.compare(longValue(), other.longValue()); + } + + @Override + public boolean equal(ExprValue other) { + return longValue().equals(other.longValue()); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java index 3efe25f06c..7eb29ab8f0 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java @@ -15,15 +15,15 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; -import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; -import lombok.EqualsAndHashCode; +import java.util.Objects; /** - * The definition of the expression missing value. + * Expression Missing Value. + * Missing value only equal to missing value, and is smaller than any other value. 
*/ -@EqualsAndHashCode -public class ExprMissingValue implements ExprValue { +public class ExprMissingValue extends AbstractExprValue { private static final ExprValue instance = new ExprMissingValue(); private ExprMissingValue() { @@ -39,7 +39,7 @@ public Object value() { } @Override - public ExprCoreType type() { + public ExprType type() { throw new ExpressionEvaluationException("invalid to call type operation on missing value"); } @@ -47,4 +47,27 @@ public ExprCoreType type() { public boolean isMissing() { return true; } + + /** + * When MISSING value compare to other expression value. + * 1) MISSING is equal to MISSING. + * 2) MISSING is less than all other expression values. + */ + @Override + public int compare(ExprValue other) { + return other.isMissing() ? 0 : -1; + } + + /** + * Missing value is equal to Missing value. + */ + @Override + public boolean equal(ExprValue other) { + return other.isMissing(); + } + + @Override + public int hashCode() { + return Objects.hashCode("MISSING"); + } } \ No newline at end of file diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java index b0b509c9e2..64d035e82f 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java @@ -15,20 +15,28 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; -import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; -import lombok.EqualsAndHashCode; +import java.util.Objects; /** - * The definition of the expression null value. + * Expression Null Value. + * Null value + *
  • equal to null value. + *
  • large than missing value. + *
  • less than any other value. */ -@EqualsAndHashCode -public class ExprNullValue implements ExprValue { +public class ExprNullValue extends AbstractExprValue { private static final ExprValue instance = new ExprNullValue(); private ExprNullValue() { } + @Override + public int hashCode() { + return Objects.hashCode("NULL"); + } + public static ExprValue of() { return instance; } @@ -39,7 +47,7 @@ public Object value() { } @Override - public ExprCoreType type() { + public ExprType type() { throw new ExpressionEvaluationException("invalid to call type operation on null value"); } @@ -47,4 +55,24 @@ public ExprCoreType type() { public boolean isNull() { return true; } + + /** + * When NULL value compare to other expression value. + * 1) NULL is equal to NULL. + * 2) NULL is large than MISSING. + * 3) NULL is less than all other expression values. + */ + @Override + public int compare(ExprValue other) { + return other.isNull() ? 0 : other.isMissing() ? 1 : -1; + } + + /** + * NULL value is equal to NULL value. + */ + @Override + public boolean equal(ExprValue other) { + return other.isNull(); + } + } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprStringValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprStringValue.java index a06af303b5..8ea3e0f60d 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprStringValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprStringValue.java @@ -16,12 +16,16 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import java.util.Objects; import lombok.EqualsAndHashCode; import lombok.RequiredArgsConstructor; -@EqualsAndHashCode +/** + * Expression String Value. 
+ */ @RequiredArgsConstructor -public class ExprStringValue implements ExprValue { +public class ExprStringValue extends AbstractExprValue { private final String value; @Override @@ -30,12 +34,32 @@ public Object value() { } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.STRING; } + @Override + public String stringValue() { + return value; + } + @Override public String toString() { return String.format("\"%s\"", value); } + + @Override + public int compare(ExprValue other) { + return value.compareTo(other.stringValue()); + } + + @Override + public boolean equal(ExprValue other) { + return value.equals(other.stringValue()); + } + + @Override + public int hashCode() { + return Objects.hashCode(value); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java new file mode 100644 index 0000000000..22917a80eb --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java @@ -0,0 +1,88 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.util.Objects; +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; + +/** + * Expression Time Value. + */ +@RequiredArgsConstructor +public class ExprTimeValue extends AbstractExprValue { + /** + * todo. only support UTC now. + */ + private static final ZoneId ZONE = ZoneId.of("UTC"); + private final LocalTime time; + + /** + * Constructor. + */ + public ExprTimeValue(String time) { + try { + this.time = LocalTime.parse(time); + } catch (DateTimeParseException e) { + throw new SemanticCheckException(String.format("time:%s in unsupported format, please use " + + "HH:mm:ss", time)); + } + } + + @Override + public String value() { + return DateTimeFormatter.ISO_LOCAL_TIME.withZone(ZONE).format(time); + } + + @Override + public ExprType type() { + return ExprCoreType.TIME; + } + + @Override + public LocalTime timeValue() { + return time; + } + + @Override + public String toString() { + return String.format("TIME '%s'", value()); + } + + @Override + public int compare(ExprValue other) { + return time.compareTo(other.timeValue()); + } + + @Override + public boolean equal(ExprValue other) { + return time.equals(other.timeValue()); + } + + @Override + public int hashCode() { + return Objects.hashCode(time); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java new file mode 100644 index 0000000000..2904457598 --- /dev/null +++ 
b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java @@ -0,0 +1,96 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoUnit; +import java.util.Objects; +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; + +/** + * Expression Timestamp Value. + */ +@RequiredArgsConstructor +public class ExprTimestampValue extends AbstractExprValue { + /** + * todo. only support UTC now. + */ + private static final ZoneId ZONE = ZoneId.of("UTC"); + /** + * todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. + */ + private static final DateTimeFormatter FORMATTER = DateTimeFormatter + .ofPattern("yyyy-MM-dd HH:mm:ss"); + private final Instant timestamp; + + /** + * Constructor. 
+ */ + public ExprTimestampValue(String timestamp) { + try { + this.timestamp = LocalDateTime.parse(timestamp, FORMATTER).atZone(ZONE).toInstant(); + } catch (DateTimeParseException e) { + throw new SemanticCheckException(String.format("timestamp:%s in unsupported format, please " + + "use yyyy-MM-dd HH:mm:ss", timestamp)); + } + + } + + @Override + public String value() { + return FORMATTER.withZone(ZONE).format(timestamp.truncatedTo(ChronoUnit.SECONDS)); + } + + @Override + public ExprType type() { + return ExprCoreType.TIMESTAMP; + } + + @Override + public Instant timestampValue() { + return timestamp; + } + + @Override + public String toString() { + return String.format("TIMESTAMP '%s'", value()); + } + + @Override + public int compare(ExprValue other) { + return timestamp.compareTo(other.timestampValue()); + } + + @Override + public boolean equal(ExprValue other) { + return timestamp.equals(other.timestampValue()); + } + + @Override + public int hashCode() { + return Objects.hashCode(timestamp); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java index 92df972f66..41c702a255 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java @@ -16,17 +16,22 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.storage.bindingtuple.BindingTuple; import com.amazon.opendistroforelasticsearch.sql.storage.bindingtuple.LazyBindingTuple; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import 
java.util.stream.Collectors; import lombok.RequiredArgsConstructor; +/** + * Expression Tuple Value. + */ @RequiredArgsConstructor -public class ExprTupleValue implements ExprValue { +public class ExprTupleValue extends AbstractExprValue { private final LinkedHashMap valueMap; @@ -41,7 +46,7 @@ public Object value() { } @Override - public ExprCoreType type() { + public ExprType type() { return ExprCoreType.STRUCT; } @@ -59,21 +64,27 @@ public BindingTuple bindingTuples() { bindingName -> valueMap.getOrDefault(bindingName, ExprMissingValue.of())); } + @Override + public Map tupleValue() { + return valueMap; + } + /** * Override the equals method. * @return true for equal, otherwise false. */ - public boolean equals(Object o) { - if (o == this) { - return true; - } else if (!(o instanceof ExprTupleValue)) { + public boolean equal(ExprValue o) { + if (!(o instanceof ExprTupleValue)) { return false; } else { ExprTupleValue other = (ExprTupleValue) o; Iterator> thisIterator = this.valueMap.entrySet().iterator(); Iterator> otherIterator = other.valueMap.entrySet().iterator(); while (thisIterator.hasNext() && otherIterator.hasNext()) { - if (!thisIterator.next().equals(otherIterator.next())) { + Entry thisEntry = thisIterator.next(); + Entry otherEntry = otherIterator.next(); + if (!(thisEntry.getKey().equals(otherEntry.getKey()) + && thisEntry.getValue().equals(otherEntry.getValue()))) { return false; } } @@ -81,4 +92,16 @@ public boolean equals(Object o) { } } + /** + * Only compare the size of the map. 
+ */ + @Override + public int compare(ExprValue other) { + return Integer.compare(valueMap.size(), other.tupleValue().size()); + } + + @Override + public int hashCode() { + return Objects.hashCode(valueMap); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java index 904b1aea09..fc005a3301 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java @@ -16,12 +16,20 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.amazon.opendistroforelasticsearch.sql.storage.bindingtuple.BindingTuple; +import java.io.Serializable; +import java.time.Instant; +import java.time.LocalTime; +import java.time.ZonedDateTime; +import java.util.List; +import java.util.Map; /** * The definition of the Expression Value. */ -public interface ExprValue { +public interface ExprValue extends Serializable, Comparable { /** * Get the Object value of the Expression Value. */ @@ -30,7 +38,7 @@ public interface ExprValue { /** * Get the {@link ExprCoreType} of the Expression Value. */ - ExprCoreType type(); + ExprType type(); /** * Is null value. @@ -56,4 +64,92 @@ default boolean isMissing() { default BindingTuple bindingTuples() { return BindingTuple.EMPTY; } + + /** + * Get integer value. + */ + default Integer integerValue() { + throw new ExpressionEvaluationException( + "invalid to get integerValue from value of type " + type()); + } + + /** + * Get long value. 
+ */ + default Long longValue() { + throw new ExpressionEvaluationException( + "invalid to get longValue from value of type " + type()); + } + + /** + * Get float value. + */ + default Float floatValue() { + throw new ExpressionEvaluationException( + "invalid to get floatValue from value of type " + type()); + } + + /** + * Get float value. + */ + default Double doubleValue() { + throw new ExpressionEvaluationException( + "invalid to get doubleValue from value of type " + type()); + } + + /** + * Get string value. + */ + default String stringValue() { + throw new ExpressionEvaluationException( + "invalid to get stringValue from value of type " + type()); + } + + /** + * Get boolean value. + */ + default Boolean booleanValue() { + throw new ExpressionEvaluationException( + "invalid to get booleanValue from value of type " + type()); + } + + /** + * Get timestamp value. + */ + default Instant timestampValue() { + throw new ExpressionEvaluationException( + "invalid to get timestampValue from value of type " + type()); + } + + /** + * Get time value. + */ + default LocalTime timeValue() { + throw new ExpressionEvaluationException( + "invalid to get timeValue from value of type " + type()); + } + + /** + * Get date value. + */ + default ZonedDateTime dateValue() { + throw new ExpressionEvaluationException( + "invalid to get dateValue from value of type " + type()); + } + + /** + * Get map value. + */ + default Map tupleValue() { + throw new ExpressionEvaluationException( + "invalid to get tupleValue from value of type " + type()); + } + + /** + * Get collection value. 
+ */ + default List collectionValue() { + throw new ExpressionEvaluationException( + "invalid to get collectionValue from value of type " + type()); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtils.java index 1f601989fa..c8f9e30d4f 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtils.java @@ -23,6 +23,7 @@ import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.google.common.annotations.VisibleForTesting; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; @@ -34,8 +35,8 @@ */ @UtilityClass public class ExprValueUtils { - public static final ExprValue LITERAL_TRUE = ExprBooleanValue.ofTrue(); - public static final ExprValue LITERAL_FALSE = ExprBooleanValue.ofFalse(); + public static final ExprValue LITERAL_TRUE = ExprBooleanValue.of(true); + public static final ExprValue LITERAL_FALSE = ExprBooleanValue.of(false); public static final ExprValue LITERAL_NULL = ExprNullValue.of(); public static final ExprValue LITERAL_MISSING = ExprMissingValue.of(); @@ -117,65 +118,58 @@ public static ExprValue fromObjectValue(Object o) { } } + /** + * Construct ExprValue from Object with ExprCoreType. 
+ */ + public static ExprValue fromObjectValue(Object o, ExprCoreType type) { + switch (type) { + case TIMESTAMP: + return new ExprTimestampValue((String)o); + case DATE: + return new ExprDateValue((String)o); + case TIME: + return new ExprTimeValue((String)o); + default: + return fromObjectValue(o); + } + } + public static Integer getIntegerValue(ExprValue exprValue) { - return getNumberValue(exprValue).intValue(); + return exprValue.integerValue(); } public static Double getDoubleValue(ExprValue exprValue) { - return getNumberValue(exprValue).doubleValue(); + return exprValue.doubleValue(); } public static Long getLongValue(ExprValue exprValue) { - return getNumberValue(exprValue).longValue(); + return exprValue.longValue(); } public static Float getFloatValue(ExprValue exprValue) { - return getNumberValue(exprValue).floatValue(); + return exprValue.floatValue(); } public static String getStringValue(ExprValue exprValue) { - return convert(exprValue, STRING); + return exprValue.stringValue(); } public static List getCollectionValue(ExprValue exprValue) { - return convert(exprValue, ARRAY); + return exprValue.collectionValue(); } public static Map getTupleValue(ExprValue exprValue) { - return convert(exprValue, STRUCT); + return exprValue.tupleValue(); } public static Boolean getBooleanValue(ExprValue exprValue) { - return convert(exprValue, BOOLEAN); + return exprValue.booleanValue(); } /** - * Get Number Value from {@link ExprValue}. + * Get {@link ZonedDateTime} from ExprValue of Date type. 
*/ - @VisibleForTesting - public static Number getNumberValue(ExprValue exprValue) { - switch (exprValue.type()) { - case INTEGER: - case DOUBLE: - case LONG: - case FLOAT: - return (Number) exprValue.value(); - default: - break; - } - throw new ExpressionEvaluationException( - String - .format("invalid to getNumberValue with expression has type of %s", exprValue.type())); - } - - @SuppressWarnings("unchecked") - private static T convert(ExprValue exprValue, ExprCoreType toType) { - if (exprValue.type() == toType) { - return (T) exprValue.value(); - } else { - throw new ExpressionEvaluationException( - String.format("invalid to convert expression with type:%s to type:%s", exprValue.type(), - toType)); - } + public static ZonedDateTime getDateValue(ExprValue exprValue) { + return exprValue.dateValue(); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java index 1e0d67149e..0b8312afb5 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java @@ -19,6 +19,7 @@ import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; /** * Expression Type. @@ -87,4 +88,12 @@ public List getParent() { public String typeName() { return this.name(); } + + /** + * Retrun all the valid ExprCoreType. 
+ */ + public static List coreTypes() { + return Arrays.stream(ExprCoreType.values()).filter(type -> type != UNKNOWN) + .collect(Collectors.toList()); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/utils/NaturalExprValueOrdering.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/utils/NaturalExprValueOrdering.java index 0aeb2562d2..83534f5013 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/utils/NaturalExprValueOrdering.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/utils/NaturalExprValueOrdering.java @@ -15,21 +15,7 @@ package com.amazon.opendistroforelasticsearch.sql.data.utils; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getBooleanValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getCollectionValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getDoubleValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getFloatValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getIntegerValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getLongValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getStringValue; -import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getTupleValue; -import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.COMPARE_WITH_NULL_OR_MISSING; -import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.LIST_COMPARATOR; -import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.MAP_COMPARATOR; -import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.STRING_COMPARATOR; - import 
com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; -import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.google.common.collect.Ordering; /** @@ -45,36 +31,7 @@ public class NaturalExprValueOrdering extends ExprValueOrdering { @Override public int compare(ExprValue left, ExprValue right) { - if (COMPARE_WITH_NULL_OR_MISSING.test(left, right)) { - throw new ExpressionEvaluationException("compare with null or missing value is invalid"); - } - if (!left.type().equals(right.type())) { - throw new ExpressionEvaluationException( - String.format( - "compare expected value have same type, but with [%s, %s]", - left.type(), right.type())); - } - switch (left.type()) { - case DOUBLE: - return Double.compare(getDoubleValue(left), getDoubleValue(right)); - case FLOAT: - return Float.compare(getFloatValue(left), getFloatValue(right)); - case LONG: - return Long.compare(getLongValue(left), getLongValue(right)); - case INTEGER: - return Integer.compare(getIntegerValue(left), getIntegerValue(right)); - case BOOLEAN: - return Boolean.compare(getBooleanValue(left), getBooleanValue(right)); - case STRING: - return STRING_COMPARATOR.apply(getStringValue(left), getStringValue(right)); - case STRUCT: - return MAP_COMPARATOR.apply(getTupleValue(left), getTupleValue(right)); - case ARRAY: - return LIST_COMPARATOR.apply(getCollectionValue(left), getCollectionValue(right)); - default: - throw new ExpressionEvaluationException( - String.format("compare doesn't support type [%s]", left.type())); - } + return left.compareTo(right); } @Override diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java index 7af871dc53..34a648b385 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java @@ -19,7 
+19,6 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.Aggregator; -import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; import java.util.Arrays; @@ -57,107 +56,237 @@ public static LiteralExpression literal(ExprValue value) { return new LiteralExpression(value); } + /** + * Wrap a number to {@link LiteralExpression}. + */ + public static LiteralExpression literal(Number value) { + if (value instanceof Integer) { + return new LiteralExpression(ExprValueUtils.integerValue(value.intValue())); + } else if (value instanceof Long) { + return new LiteralExpression(ExprValueUtils.longValue(value.longValue())); + } else if (value instanceof Float) { + return new LiteralExpression(ExprValueUtils.floatValue(value.floatValue())); + } else { + return new LiteralExpression(ExprValueUtils.doubleValue(value.doubleValue())); + } + } + public static ReferenceExpression ref(String ref, ExprType type) { return new ReferenceExpression(ref, type); } public FunctionExpression abs(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.ABS.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.ABS, expressions); + } + + public FunctionExpression ceil(Expression... expressions) { + return function(BuiltinFunctionName.CEIL, expressions); + } + + public FunctionExpression ceiling(Expression... expressions) { + return function(BuiltinFunctionName.CEILING, expressions); + } + + public FunctionExpression conv(Expression... expressions) { + return function(BuiltinFunctionName.CONV, expressions); + } + + public FunctionExpression crc32(Expression... 
expressions) { + return function(BuiltinFunctionName.CRC32, expressions); + } + + public FunctionExpression euler(Expression... expressions) { + return function(BuiltinFunctionName.E, expressions); + } + + public FunctionExpression exp(Expression... expressions) { + return function(BuiltinFunctionName.EXP, expressions); + } + + public FunctionExpression floor(Expression... expressions) { + return function(BuiltinFunctionName.FLOOR, expressions); + } + + public FunctionExpression ln(Expression... expressions) { + return function(BuiltinFunctionName.LN, expressions); + } + + public FunctionExpression log(Expression... expressions) { + return function(BuiltinFunctionName.LOG, expressions); + } + + public FunctionExpression log10(Expression... expressions) { + return function(BuiltinFunctionName.LOG10, expressions); + } + + public FunctionExpression log2(Expression... expressions) { + return function(BuiltinFunctionName.LOG2, expressions); + } + + public FunctionExpression mod(Expression... expressions) { + return function(BuiltinFunctionName.MOD, expressions); + } + + public FunctionExpression pi(Expression... expressions) { + return function(BuiltinFunctionName.PI, expressions); + } + + public FunctionExpression pow(Expression... expressions) { + return function(BuiltinFunctionName.POW, expressions); + } + + public FunctionExpression power(Expression... expressions) { + return function(BuiltinFunctionName.POWER, expressions); + } + + public FunctionExpression rand(Expression... expressions) { + return function(BuiltinFunctionName.RAND, expressions); + } + + public FunctionExpression round(Expression... expressions) { + return function(BuiltinFunctionName.ROUND, expressions); + } + + public FunctionExpression sign(Expression... expressions) { + return function(BuiltinFunctionName.SIGN, expressions); + } + + public FunctionExpression sqrt(Expression... 
expressions) { + return function(BuiltinFunctionName.SQRT, expressions); + } + + public FunctionExpression truncate(Expression... expressions) { + return function(BuiltinFunctionName.TRUNCATE, expressions); + } + + public FunctionExpression acos(Expression... expressions) { + return function(BuiltinFunctionName.ACOS, expressions); + } + + public FunctionExpression asin(Expression... expressions) { + return function(BuiltinFunctionName.ASIN, expressions); + } + + public FunctionExpression atan(Expression... expressions) { + return function(BuiltinFunctionName.ATAN, expressions); + } + + public FunctionExpression atan2(Expression... expressions) { + return function(BuiltinFunctionName.ATAN2, expressions); + } + + public FunctionExpression cos(Expression... expressions) { + return function(BuiltinFunctionName.COS, expressions); + } + + public FunctionExpression cot(Expression... expressions) { + return function(BuiltinFunctionName.COT, expressions); + } + + public FunctionExpression degrees(Expression... expressions) { + return function(BuiltinFunctionName.DEGREES, expressions); + } + + public FunctionExpression radians(Expression... expressions) { + return function(BuiltinFunctionName.RADIANS, expressions); + } + + public FunctionExpression sin(Expression... expressions) { + return function(BuiltinFunctionName.SIN, expressions); + } + + public FunctionExpression tan(Expression... expressions) { + return function(BuiltinFunctionName.TAN, expressions); } public FunctionExpression add(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.ADD.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.ADD, expressions); } public FunctionExpression subtract(Expression... 
expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.SUBTRACT.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.SUBTRACT, expressions); } public FunctionExpression multiply(Expression... expressions) { + return function(BuiltinFunctionName.MULTIPLY, expressions); + } + + public FunctionExpression dayofmonth(Expression... expressions) { return (FunctionExpression) - repository.compile(BuiltinFunctionName.MULTIPLY.getName(), Arrays.asList(expressions)); + repository.compile(BuiltinFunctionName.DAYOFMONTH.getName(), Arrays.asList(expressions)); } public FunctionExpression divide(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.DIVIDE.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.DIVIDE, expressions); } public FunctionExpression module(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.MODULES.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.MODULES, expressions); } public FunctionExpression and(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.AND.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.AND, expressions); } public FunctionExpression or(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.OR.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.OR, expressions); } public FunctionExpression xor(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.XOR.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.XOR, expressions); } public FunctionExpression not(Expression... 
expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.NOT.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.NOT, expressions); } public FunctionExpression equal(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.EQUAL.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.EQUAL, expressions); } public FunctionExpression notequal(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.NOTEQUAL.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.NOTEQUAL, expressions); } public FunctionExpression less(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.LESS.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.LESS, expressions); } public FunctionExpression lte(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.LTE.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.LTE, expressions); } public FunctionExpression greater(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.GREATER.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.GREATER, expressions); } public FunctionExpression gte(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.GTE.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.GTE, expressions); } public FunctionExpression like(Expression... expressions) { - return (FunctionExpression) - repository.compile(BuiltinFunctionName.LIKE.getName(), Arrays.asList(expressions)); + return function(BuiltinFunctionName.LIKE, expressions); } public Aggregator avg(Expression... 
expressions) { - return (Aggregator) - repository.compile(BuiltinFunctionName.AVG.getName(), Arrays.asList(expressions)); + return aggregate(BuiltinFunctionName.AVG, expressions); } public Aggregator sum(Expression... expressions) { - return (Aggregator) - repository.compile(BuiltinFunctionName.SUM.getName(), Arrays.asList(expressions)); + return aggregate(BuiltinFunctionName.SUM, expressions); } public Aggregator count(Expression... expressions) { - return (Aggregator) - repository.compile(BuiltinFunctionName.COUNT.getName(), Arrays.asList(expressions)); + return aggregate(BuiltinFunctionName.COUNT, expressions); + } + + private FunctionExpression function(BuiltinFunctionName functionName, Expression... expressions) { + return (FunctionExpression) repository.compile( + functionName.getName(), Arrays.asList(expressions)); + } + + private Aggregator aggregate(BuiltinFunctionName functionName, Expression... expressions) { + return (Aggregator) repository.compile( + functionName.getName(), Arrays.asList(expressions)); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java index 8f233cf224..c4348ddfcf 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/Expression.java @@ -18,11 +18,12 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import java.io.Serializable; /** * The definition of the resolved expression. */ -public interface Expression { +public interface Expression extends Serializable { /** * Evaluate the value of expression in the value environment. 
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/config/ExpressionConfig.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/config/ExpressionConfig.java index da9d9ea1d1..ea416c47dd 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/config/ExpressionConfig.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/config/ExpressionConfig.java @@ -17,9 +17,10 @@ import com.amazon.opendistroforelasticsearch.sql.expression.DSL; import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.AggregatorFunction; +import com.amazon.opendistroforelasticsearch.sql.expression.datetime.DateTimeFunction; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; import com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic.ArithmeticFunction; -import com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic.UnaryFunction; +import com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic.MathematicalFunction; import com.amazon.opendistroforelasticsearch.sql.expression.operator.predicate.BinaryPredicateOperator; import com.amazon.opendistroforelasticsearch.sql.expression.operator.predicate.UnaryPredicateOperator; import java.util.HashMap; @@ -40,9 +41,10 @@ public BuiltinFunctionRepository functionRepository() { new BuiltinFunctionRepository(new HashMap<>()); ArithmeticFunction.register(builtinFunctionRepository); BinaryPredicateOperator.register(builtinFunctionRepository); - UnaryFunction.register(builtinFunctionRepository); + MathematicalFunction.register(builtinFunctionRepository); UnaryPredicateOperator.register(builtinFunctionRepository); AggregatorFunction.register(builtinFunctionRepository); + DateTimeFunction.register(builtinFunctionRepository); return builtinFunctionRepository; } diff --git 
a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunction.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunction.java new file mode 100644 index 0000000000..4b96eb80a6 --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunction.java @@ -0,0 +1,61 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression.datetime; + +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getDateValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DATE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.DAYOFMONTH; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprIntegerValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; +import lombok.experimental.UtilityClass; + +/** + * The definition of date and time functions. 
+ * 1) have the clear interface for function define. + * 2) the implementation should rely on ExprValue. + */ +@UtilityClass +public class DateTimeFunction { + public void register(BuiltinFunctionRepository repository) { + repository.register(dayOfMonth()); + } + + /** + * DAYOFMONTH(DATE). return the day of the month (1-31). + */ + private FunctionResolver dayOfMonth() { + return FunctionDSL.define(DAYOFMONTH.getName(), + FunctionDSL.impl(FunctionDSL.nullMissingHandling(DateTimeFunction::exprDayOfMonth), + INTEGER, DATE) + ); + } + + /** + * Day of Month implementation for ExprValue. + * @param date ExprValue of Date type. + * @return ExprValue. + */ + private ExprValue exprDayOfMonth(ExprValue date) { + return new ExprIntegerValue(getDateValue(date).getMonthValue()); + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java index 24bb3aafa5..a17b5d65d2 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java @@ -12,7 +12,45 @@ @Getter @RequiredArgsConstructor public enum BuiltinFunctionName { + /** + * Mathematical Functions. 
+ */ ABS(FunctionName.of("abs")), + CEIL(FunctionName.of("ceil")), + CEILING(FunctionName.of("ceiling")), + CONV(FunctionName.of("conv")), + CRC32(FunctionName.of("crc32")), + E(FunctionName.of("e")), + EXP(FunctionName.of("exp")), + FLOOR(FunctionName.of("floor")), + LN(FunctionName.of("ln")), + LOG(FunctionName.of("log")), + LOG10(FunctionName.of("log10")), + LOG2(FunctionName.of("log2")), + MOD(FunctionName.of("mod")), + PI(FunctionName.of("pi")), + POW(FunctionName.of("pow")), + POWER(FunctionName.of("power")), + RAND(FunctionName.of("rand")), + ROUND(FunctionName.of("round")), + SIGN(FunctionName.of("sign")), + SQRT(FunctionName.of("sqrt")), + TRUNCATE(FunctionName.of("truncate")), + + ACOS(FunctionName.of("acos")), + ASIN(FunctionName.of("asin")), + ATAN(FunctionName.of("atan")), + ATAN2(FunctionName.of("atan2")), + COS(FunctionName.of("cos")), + COT(FunctionName.of("cot")), + DEGREES(FunctionName.of("degrees")), + RADIANS(FunctionName.of("radians")), + SIN(FunctionName.of("sin")), + TAN(FunctionName.of("tan")), + + /** + * Text Functions. + */ TOSTRING(FunctionName.of("tostring")), /** @@ -39,6 +77,11 @@ public enum BuiltinFunctionName { GTE(FunctionName.of(">=")), LIKE(FunctionName.of("like")), + /** + * Date and Time Functions. + */ + DAYOFMONTH(FunctionName.of("dayofmonth")), + /** * Aggregation Function. */ diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java new file mode 100644 index 0000000000..841027656f --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionDSL.java @@ -0,0 +1,170 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression.function; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; +import lombok.experimental.UtilityClass; +import org.apache.commons.lang3.tuple.Pair; + +/** + * Function Define Utility. + */ +@UtilityClass +public class FunctionDSL { + /** + * Define overloaded function with implementation. + * + * @param functionName function name. + * @param functions a list of function implementation. + * @return FunctionResolver. + */ + public FunctionResolver define(FunctionName functionName, + Function>... functions) { + return define(functionName, Arrays.asList(functions)); + } + + /** + * Define overloaded function with implementation. + * + * @param functionName function name. + * @param functions a list of function implementation. + * @return FunctionResolver. 
+ */ + public FunctionResolver define(FunctionName functionName, + List>> functions) { + + FunctionResolver.FunctionResolverBuilder builder = FunctionResolver.builder(); + builder.functionName(functionName); + for (Function> func : functions) { + Pair functionBuilder = func.apply(functionName); + builder.functionBundle(functionBuilder.getKey(), functionBuilder.getValue()); + } + return builder.build(); + } + + /** + * Unary Function Implementation. + * + * @param function {@link ExprValue} based unary function. + * @param returnType return type. + * @param argsType argument type. + * @return Unary Function Implementation. + */ + public SerializableFunction> impl( + SerializableFunction function, + ExprType returnType, + ExprType argsType) { + + return functionName -> { + FunctionSignature functionSignature = + new FunctionSignature(functionName, Collections.singletonList(argsType)); + FunctionBuilder functionBuilder = + arguments -> new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue value = arguments.get(0).valueOf(valueEnv); + return function.apply(value); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format("%s(%s)", functionName, + arguments.stream() + .map(Object::toString) + .collect(Collectors.joining(", "))); + } + }; + return Pair.of(functionSignature, functionBuilder); + }; + } + + /** + * Binary Function Implementation. + * + * @param function {@link ExprValue} based unary function. + * @param returnType return type. + * @param args1Type argument type. + * @param args2Type argument type. + * @return Unary Function Implementation. 
+ */ + public SerializableFunction> impl( + SerializableBiFunction function, + ExprType returnType, + ExprType args1Type, + ExprType args2Type) { + + return functionName -> { + FunctionSignature functionSignature = + new FunctionSignature(functionName, Arrays.asList(args1Type, args2Type)); + FunctionBuilder functionBuilder = + arguments -> new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + return function.apply(arg1, arg2); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format("%s %s %s", arguments.get(0).toString(), functionName, arguments + .get(1).toString()); + } + }; + return Pair.of(functionSignature, functionBuilder); + }; + } + + /** + * Wrapper the unary ExprValue function with default NULL and MISSING handling. + */ + public SerializableFunction nullMissingHandling( + SerializableFunction function) { + return value -> { + if (value.isMissing()) { + return ExprValueUtils.missingValue(); + } else if (value.isNull()) { + return ExprValueUtils.nullValue(); + } else { + return function.apply(value); + } + }; + } +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionName.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionName.java index a3611a0506..d0fe205413 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionName.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/FunctionName.java @@ -15,6 +15,7 @@ package com.amazon.opendistroforelasticsearch.sql.expression.function; +import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -24,12 +25,12 @@ */ 
@EqualsAndHashCode @RequiredArgsConstructor -public class FunctionName { +public class FunctionName implements Serializable { @Getter private final String functionName; public static FunctionName of(String functionName) { - return new FunctionName(functionName); + return new FunctionName(functionName.toLowerCase()); } @Override diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/SerializableBiFunction.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/SerializableBiFunction.java new file mode 100644 index 0000000000..7fb1b4bd9e --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/SerializableBiFunction.java @@ -0,0 +1,27 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression.function; + +import java.io.Serializable; +import java.util.function.BiFunction; + +/** + * Serializable BiFunction. 
+ */ +public interface SerializableBiFunction extends BiFunction, Serializable { +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/SerializableFunction.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/SerializableFunction.java new file mode 100644 index 0000000000..547c22ae6e --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/SerializableFunction.java @@ -0,0 +1,24 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression.function; + +import java.io.Serializable; +import java.util.function.Function; + +public interface SerializableFunction extends Function, Serializable { +} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/OperatorUtils.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/OperatorUtils.java index e8031c88ec..39d4db9c09 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/OperatorUtils.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/OperatorUtils.java @@ -29,11 +29,66 @@ import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.Collectors; import lombok.experimental.UtilityClass; @UtilityClass public class OperatorUtils { + /** + * Construct {@link FunctionBuilder} which call function with three arguments produced by + * observers.In general, if any operand evaluates to a MISSING value, the enclosing operator + * will return MISSING; if none of operands evaluates to a MISSING value but there is an + * operand evaluates to a NULL value, the enclosing operator will return NULL. 
+ * + * @param functionName function name + * @param function {@link BiFunction} + * @param observer1 extract the value of type T from the first argument + * @param observer2 extract the value of type U from the first argument + * @param observer3 extract the value of type V from the first argument + * @param returnType return type + * @param the type of the first argument to the function + * @param the type of the second argument to the function + * @param the type of the third argument to the function + * @param the type of the result of the function + * @return {@link FunctionBuilder} + */ + public static FunctionBuilder tripleArgFunc( + FunctionName functionName, + TriFunction function, + Function observer1, + Function observer2, + Function observer3, + ExprCoreType returnType) { + return arguments -> new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + if (arg1.isMissing() || arg2.isMissing() || arg3.isMissing()) { + return ExprValueUtils.missingValue(); + } else if (arg1.isNull() || arg2.isNull() || arg3.isNull()) { + return ExprValueUtils.nullValue(); + } else { + return ExprValueUtils.fromObjectValue( + function.apply(observer1.apply(arg1), observer2.apply(arg2), observer3.apply(arg3))); + } + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format("%s(%s, %s, %s)", functionName, arguments.get(0).toString(), arguments + .get(1).toString(), arguments.get(2).toString()); + } + }; + } + /** * Construct {@link FunctionBuilder} which call function with arguments produced by observer. 
* @@ -104,6 +159,57 @@ public String toString() { }; } + /** + * Construct {@link FunctionBuilder} which call function with arguments produced by observer1 and + * observer2 In general, if any operand evaluates to a MISSING value, the enclosing operator will + * return MISSING; if none of operands evaluates to a MISSING value but there is an operand + * evaluates to a NULL value, the enclosing operator will return NULL. + * + * @param functionName function name + * @param function {@link BiFunction} + * @param observer1 extract the value of type T from the first argument + * @param observer2 extract the value of type U from the second argument + * @param returnType return type + * @param the type of the first argument to the function + * @param the type of the second argument to the function + * @param the type of the result of the function + * @return {@link FunctionBuilder} + */ + public static FunctionBuilder doubleArgFunc( + FunctionName functionName, + BiFunction function, + Function observer1, + Function observer2, + ExprType returnType) { + return arguments -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment env) { + ExprValue arg1 = arguments.get(0).valueOf(env); + ExprValue arg2 = arguments.get(1).valueOf(env); + if (arg1.isMissing() || arg2.isMissing()) { + return ExprValueUtils.missingValue(); + } else if (arg1.isNull() || arg2.isNull()) { + return ExprValueUtils.nullValue(); + } else { + return ExprValueUtils.fromObjectValue( + function.apply(observer1.apply(arg1), observer2.apply(arg2))); + } + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format("%s(%s, %s)", functionName, arguments.get(0).toString(), arguments + .get(1).toString()); + } + }; + } + /** * Construct {@link FunctionBuilder} which call function with arguments produced by observer In * general, if any operand evaluates to a MISSING value, the enclosing 
operator will return @@ -153,22 +259,36 @@ public String toString() { } /** - * String comparator. - */ - public static final BiFunction STRING_COMPARATOR = String::compareTo; - /** - * List comparator. - */ - public static final BiFunction LIST_COMPARATOR = - (left, right) -> Integer.compare(left.size(), right.size()); - /** - * Map comparator. - */ - public static final BiFunction MAP_COMPARATOR = - (left, right) -> Integer.compare(left.size(), right.size()); - /** - * Predicate NULL or MISSING. + * Construct {@link FunctionBuilder} which call function with no argument. + * + * @param functionName function name + * @param function {@link Function} + * @param returnType return type + * @param the type of the result to the function + * @return {@link FunctionBuilder} */ - public static final BiPredicate COMPARE_WITH_NULL_OR_MISSING = - (left, right) -> left.isMissing() || right.isMissing() || left.isNull() || right.isNull(); + public static FunctionBuilder noArgFunction(FunctionName functionName, + Supplier function, + ExprCoreType returnType) { + return arguments -> new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + return ExprValueUtils.fromObjectValue(function.get()); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format("%s()", functionName); + } + }; + } + + public interface TriFunction { + R apply(T t, U u, V v); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunction.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunction.java index 300b89065d..cf296ef056 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunction.java +++ 
b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunction.java @@ -94,10 +94,10 @@ private static FunctionResolver divide() { return new FunctionResolver( BuiltinFunctionName.DIVIDE.getName(), scalarFunction(BuiltinFunctionName.DIVIDE.getName(), - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2) + (v1, v2) -> v2 == 0 ? null : v1 / v2, + (v1, v2) -> v2 == 0 ? null : v1 / v2, + (v1, v2) -> v2 == 0 ? null : v1 / v2, + (v1, v2) -> v2 == 0 ? null : v1 / v2) ); } @@ -106,10 +106,10 @@ private static FunctionResolver modules() { return new FunctionResolver( BuiltinFunctionName.MODULES.getName(), scalarFunction(BuiltinFunctionName.MODULES.getName(), - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2) + (v1, v2) -> v2 == 0 ? null : v1 % v2, + (v1, v2) -> v2 == 0 ? null : v1 % v2, + (v1, v2) -> v2 == 0 ? null : v1 % v2, + (v1, v2) -> v2 == 0 ? null : v1 % v2) ); } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/MathematicalFunction.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/MathematicalFunction.java new file mode 100644 index 0000000000..0ba0b6dbcb --- /dev/null +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/MathematicalFunction.java @@ -0,0 +1,733 @@ +/* + * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic; + +import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.doubleArgFunc; +import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.noArgFunction; +import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.tripleArgFunc; +import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.unaryOperator; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; +import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionBuilder; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionSignature; +import com.google.common.collect.ImmutableMap; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Random; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.zip.CRC32; +import lombok.experimental.UtilityClass; + +@UtilityClass +public class MathematicalFunction { + /** + * Register Mathematical Functions. + * + * @param repository {@link BuiltinFunctionRepository}. 
+ */ + public static void register(BuiltinFunctionRepository repository) { + repository.register(abs()); + repository.register(ceil()); + repository.register(ceiling()); + repository.register(conv()); + repository.register(crc32()); + repository.register(euler()); + repository.register(exp()); + repository.register(floor()); + repository.register(ln()); + repository.register(log()); + repository.register(log10()); + repository.register(log2()); + repository.register(mod()); + repository.register(pow()); + repository.register(power()); + repository.register(round()); + repository.register(sign()); + repository.register(sqrt()); + repository.register(truncate()); + repository.register(pi()); + repository.register(rand()); + repository.register(acos()); + repository.register(asin()); + repository.register(atan()); + repository.register(atan2()); + repository.register(cos()); + repository.register(cot()); + repository.register(degrees()); + repository.register(radians()); + repository.register(sin()); + repository.register(tan()); + } + + /** + * Definition of abs() function. The supported signature of abs() function are INT -> INT LONG -> + * LONG FLOAT -> FLOAT DOUBLE -> DOUBLE + */ + private static FunctionResolver abs() { + return new FunctionResolver( + BuiltinFunctionName.ABS.getName(), + singleArgumentFunction( + BuiltinFunctionName.ABS.getName(), Math::abs, Math::abs, Math::abs, Math::abs)); + } + + /** + * Definition of ceil(x)/ceiling(x) function. 
Calculate the next highest integer that x rounds up + * to The supported signature of ceil/ceiling function is DOUBLE -> INTEGER + */ + private static FunctionResolver ceil() { + FunctionName functionName = BuiltinFunctionName.CEIL.getName(); + return new FunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, + v -> ((int) Math.ceil(v)), + ExprValueUtils::getDoubleValue, + ExprCoreType.INTEGER)) + .build()); + } + + private static FunctionResolver ceiling() { + FunctionName functionName = BuiltinFunctionName.CEILING.getName(); + return new FunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, + v -> ((int) Math.ceil(v)), + ExprValueUtils::getDoubleValue, + ExprCoreType.INTEGER)) + .build()); + } + + /** + * Definition of conv(x, a, b) function. 
+ * Convert number x from base a to base b + * The supported signature of floor function is + * (STRING, INTEGER, INTEGER) -> STRING + */ + private static FunctionResolver conv() { + FunctionName functionName = BuiltinFunctionName.CONV.getName(); + return new FunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, + Arrays.asList(ExprCoreType.STRING, ExprCoreType.INTEGER, ExprCoreType.INTEGER)), + tripleArgFunc(functionName, + (num, fromBase, toBase) -> Integer.toString( + Integer.parseInt(num, fromBase), toBase), + ExprValueUtils::getStringValue, ExprValueUtils::getIntegerValue, + ExprValueUtils::getIntegerValue, ExprCoreType.STRING)) + .put( + new FunctionSignature(functionName, + Arrays.asList( + ExprCoreType.INTEGER, ExprCoreType.INTEGER, ExprCoreType.INTEGER)), + tripleArgFunc(functionName, + (num, fromBase, toBase) -> Integer.toString( + Integer.parseInt(num.toString(), fromBase), toBase), + ExprValueUtils::getIntegerValue, ExprValueUtils::getIntegerValue, + ExprValueUtils::getIntegerValue, ExprCoreType.STRING)) + .build()); + } + + /** + * Definition of crc32(x) function. + * Calculate a cyclic redundancy check value and returns a 32-bit unsigned value + * The supported signature of crc32 function is + * STRING -> LONG + */ + private static FunctionResolver crc32() { + FunctionName functionName = BuiltinFunctionName.CRC32.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.STRING)), + unaryOperator( + functionName, + v -> { + CRC32 crc = new CRC32(); + crc.update(v.getBytes()); + return crc.getValue(); + }, + ExprValueUtils::getStringValue, ExprCoreType.LONG)) + .build()); + } + + /** + * Definition of e() function. + * Get the Euler's number. 
+ * () -> DOUBLE + */ + private static FunctionResolver euler() { + FunctionName functionName = BuiltinFunctionName.E.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put(new FunctionSignature(functionName, Collections.emptyList()), + noArgFunction(functionName, () -> Math.E, ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of exp(x) function. Calculate exponent function e to the x The supported signature + * of exp function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver exp() { + return new FunctionResolver( + BuiltinFunctionName.EXP.getName(), + singleArgumentFunction(BuiltinFunctionName.EXP.getName(), Math::exp)); + } + + /** + * Definition of floor(x) function. Calculate the next nearest whole integer that x rounds down to + * The supported signature of floor function is DOUBLE -> INTEGER + */ + private static FunctionResolver floor() { + FunctionName functionName = BuiltinFunctionName.FLOOR.getName(); + return new FunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, + v -> ((int) Math.floor(v)), + ExprValueUtils::getDoubleValue, + ExprCoreType.INTEGER)) + .build()); + } + + /** + * Definition of ln(x) function. Calculate the natural logarithm of x The supported signature of + * ln function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver ln() { + return new FunctionResolver( + BuiltinFunctionName.LN.getName(), + singleArgumentFunction(BuiltinFunctionName.LN.getName(), Math::log)); + } + + /** + * Definition of log(b, x) function. 
Calculate the logarithm of x using b as the base The + * supported signature of log function is (b: INTEGER/LONG/FLOAT/DOUBLE, x: + * INTEGER/LONG/FLOAT/DOUBLE]) -> DOUBLE + */ + private static FunctionResolver log() { + FunctionName functionName = BuiltinFunctionName.LOG.getName(); + return new FunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, Math::log, ExprValueUtils::getDoubleValue, ExprCoreType.DOUBLE)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.DOUBLE)), + doubleArgFunc( + functionName, + (b, v) -> Math.log(v) / Math.log(b), + ExprValueUtils::getDoubleValue, + ExprValueUtils::getDoubleValue, + ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of log10(x) function. Calculate base-10 logarithm of x The supported signature of + * log function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver log10() { + return new FunctionResolver( + BuiltinFunctionName.LOG10.getName(), + singleArgumentFunction(BuiltinFunctionName.LOG10.getName(), Math::log10)); + } + + /** + * Definition of log2(x) function. Calculate base-2 logarithm of x The supported signature of log + * function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver log2() { + return new FunctionResolver( + BuiltinFunctionName.LOG2.getName(), + singleArgumentFunction(BuiltinFunctionName.LOG2.getName(), v -> Math.log(v) / Math.log(2))); + } + + /** + * Definition of mod(x, y) function. 
+ * Calculate the remainder of x divided by y + * The supported signature of mod function is + * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) + * -> wider type between types of x and y + */ + private static FunctionResolver mod() { + return new FunctionResolver( + BuiltinFunctionName.MOD.getName(), + doubleArgumentsFunction(BuiltinFunctionName.MOD.getName(), + (v1, v2) -> v2 == 0 ? null : v1 % v2, + (v1, v2) -> v2 == 0 ? null : v1 % v2, + (v1, v2) -> v2 == 0 ? null : v1 % v2, + (v1, v2) -> v2 == 0 ? null : v1 % v2)); + } + + /** + * Definition of pi() function. + * Get the value of pi. + * () -> DOUBLE + */ + private static FunctionResolver pi() { + FunctionName functionName = BuiltinFunctionName.PI.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put(new FunctionSignature(functionName, Collections.emptyList()), + noArgFunction(functionName, () -> Math.PI, ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of pow(x, y)/power(x, y) function. + * Calculate the value of x raised to the power of y + * The supported signature of pow/power function is + * (INTEGER, INTEGER) -> INTEGER + * (LONG, LONG) -> LONG + * (FLOAT, FLOAT) -> FLOAT + * (DOUBLE, DOUBLE) -> DOUBLE + */ + private static FunctionResolver pow() { + FunctionName functionName = BuiltinFunctionName.POW.getName(); + return new FunctionResolver(functionName, doubleArgumentsFunction(functionName, Math::pow)); + } + + private static FunctionResolver power() { + FunctionName functionName = BuiltinFunctionName.POWER.getName(); + return new FunctionResolver(functionName, doubleArgumentsFunction(functionName, Math::pow)); + } + + /** + * Definition of rand() and rand(N) function. + * rand() returns a random floating-point value in the range 0 <= value < 1.0 + * If integer N is specified, the seed is initialized prior to execution. 
+ * One implication of this behavior is with identical argument N,rand(N) returns the same value + * each time, and thus produces a repeatable sequence of column values. + * The supported signature of rand function is + * ([INTEGER]) -> FLOAT + */ + private static FunctionResolver rand() { + FunctionName functionName = BuiltinFunctionName.RAND.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Collections.emptyList()), + noArgFunction(functionName, () -> new Random().nextFloat(), ExprCoreType.FLOAT)) + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.INTEGER)), + unaryOperator( + functionName, n -> new Random(n).nextFloat(), ExprValueUtils::getIntegerValue, + ExprCoreType.FLOAT)) + .build()); + } + + /** + * Definition of round(x)/round(x, d) function. + * Rounds the argument x to d decimal places, d defaults to 0 if not specified. + * The supported signature of round function is + * (x: INTEGER [, y: INTEGER]) -> INTEGER + * (x: LONG [, y: INTEGER]) -> LONG + * (x: FLOAT [, y: INTEGER]) -> FLOAT + * (x: DOUBLE [, y: INTEGER]) -> DOUBLE + */ + private static FunctionResolver round() { + FunctionName functionName = BuiltinFunctionName.ROUND.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.INTEGER)), + unaryOperator( + functionName, v -> (long) Math.round(v), ExprValueUtils::getIntegerValue, + ExprCoreType.LONG)) + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.LONG)), + unaryOperator( + functionName, v -> (long) Math.round(v), ExprValueUtils::getLongValue, + ExprCoreType.LONG)) + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.FLOAT)), + unaryOperator( + functionName, v -> (double) Math.round(v), ExprValueUtils::getFloatValue, + ExprCoreType.DOUBLE)) + .put( + new FunctionSignature(functionName, 
Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, v -> (double) Math.round(v), ExprValueUtils::getDoubleValue, + ExprCoreType.DOUBLE)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.INTEGER, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.HALF_UP).longValue(), + ExprValueUtils::getIntegerValue, ExprValueUtils::getIntegerValue, + ExprCoreType.LONG)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.LONG, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.HALF_UP).longValue(), + ExprValueUtils::getLongValue, ExprValueUtils::getIntegerValue, + ExprCoreType.LONG)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.FLOAT, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.HALF_UP).doubleValue(), + ExprValueUtils::getFloatValue, ExprValueUtils::getIntegerValue, + ExprCoreType.DOUBLE)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.HALF_UP).doubleValue(), + ExprValueUtils::getDoubleValue, ExprValueUtils::getIntegerValue, + ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of sign(x) function. 
+ * Returns the sign of the argument as -1, 0, or 1 + * depending on whether x is negative, zero, or positive + * The supported signature is + * INTEGER/LONG/FLOAT/DOUBLE -> INTEGER + */ + private static FunctionResolver sign() { + FunctionName functionName = BuiltinFunctionName.SIGN.getName(); + return new FunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, v -> (int) Math.signum(v), ExprValueUtils::getDoubleValue, + ExprCoreType.INTEGER)) + .build()); + } + + /** + * Definition of sqrt(x) function. + * Calculate the square root of a non-negative number x + * The supported signature is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver sqrt() { + FunctionName functionName = BuiltinFunctionName.SQRT.getName(); + return new FunctionResolver( + functionName, + singleArgumentFunction( + functionName, + v -> v < 0 ? null : Math.sqrt(v))); + } + + /** + * Definition of truncate(x, d) function. 
+ * Returns the number x, truncated to d decimal places + * The supported signature of round function is + * (x: INTEGER, y: INTEGER) -> INTEGER + * (x: LONG, y: INTEGER) -> LONG + * (x: FLOAT, y: INTEGER) -> FLOAT + * (x: DOUBLE, y: INTEGER) -> DOUBLE + */ + private static FunctionResolver truncate() { + FunctionName functionName = BuiltinFunctionName.TRUNCATE.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.INTEGER, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.DOWN).longValue(), + ExprValueUtils::getIntegerValue, ExprValueUtils::getIntegerValue, + ExprCoreType.LONG)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.LONG, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.DOWN).longValue(), + ExprValueUtils::getLongValue, ExprValueUtils::getIntegerValue, + ExprCoreType.LONG)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.FLOAT, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.DOWN).doubleValue(), + ExprValueUtils::getFloatValue, ExprValueUtils::getIntegerValue, + ExprCoreType.DOUBLE)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.INTEGER)), + doubleArgFunc(functionName, + (v1, v2) -> new BigDecimal(v1).setScale(v2, RoundingMode.DOWN).doubleValue(), + ExprValueUtils::getDoubleValue, ExprValueUtils::getIntegerValue, + ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of acos(x) function. + * Calculates the arc cosine of x, that is, the value whose cosine is x. + * Returns NULL if x is not in the range -1 to 1. 
+ * The supported signature of acos function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver acos() { + FunctionName functionName = BuiltinFunctionName.ACOS.getName(); + return new FunctionResolver( + functionName, + singleArgumentFunction(functionName, v -> v < -1 || v > 1 ? null : Math.acos(v))); + } + + /** + * Definition of asin(x) function. + * Calculates the arc sine of x, that is, the value whose sine is x. + * Returns NULL if x is not in the range -1 to 1. + * The supported signature of asin function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver asin() { + FunctionName functionName = BuiltinFunctionName.ASIN.getName(); + return new FunctionResolver( + functionName, + singleArgumentFunction(functionName, v -> v < -1 || v > 1 ? null : Math.asin(v))); + } + + /** + * Definition of atan(x) and atan(y, x) function. + * atan(x) calculates the arc tangent of x, that is, the value whose tangent is x. + * atan(y, x) calculates the arc tangent of y / x, except that the signs of both arguments + * are used to determine the quadrant of the result. + * The supported signature of atan function is + * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) -> DOUBLE + */ + private static FunctionResolver atan() { + FunctionName functionName = BuiltinFunctionName.ATAN.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, Math::atan, ExprValueUtils::getDoubleValue, ExprCoreType.DOUBLE)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.DOUBLE)), + doubleArgFunc(functionName, + Math::atan2, ExprValueUtils::getDoubleValue, ExprValueUtils::getDoubleValue, + ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of atan2(y, x) function. 
+ * Calculates the arc tangent of y / x, except that the signs of both arguments + * are used to determine the quadrant of the result. + * The supported signature of atan2 function is + * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) -> DOUBLE + */ + private static FunctionResolver atan2() { + FunctionName functionName = BuiltinFunctionName.ATAN2.getName(); + return new FunctionResolver(functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.DOUBLE)), + doubleArgFunc(functionName, + Math::atan2, ExprValueUtils::getDoubleValue, ExprValueUtils::getDoubleValue, + ExprCoreType.DOUBLE)) + .build()); + } + + /** + * Definition of cos(x) function. + * Calculates the cosine of X, where X is given in radians + * The supported signature of cos function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver cos() { + FunctionName functionName = BuiltinFunctionName.COS.getName(); + return new FunctionResolver(functionName, singleArgumentFunction(functionName, Math::cos)); + } + + /** + * Definition of cot(x) function. + * Calculates the cotangent of x + * The supported signature of cot function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver cot() { + FunctionName functionName = BuiltinFunctionName.COT.getName(); + return new FunctionResolver( + functionName, + singleArgumentFunction(functionName, v -> { + if (v == 0) { + throw new ArithmeticException(String.format("Out of range value for cot(%s)", v)); + } + return 1 / Math.tan(v); + })); + } + + /** + * Definition of degrees(x) function. 
+ * Converts x from radians to degrees + * The supported signature of degrees function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver degrees() { + FunctionName functionName = BuiltinFunctionName.DEGREES.getName(); + return new FunctionResolver( + functionName, singleArgumentFunction(functionName, Math::toDegrees)); + } + + /** + * Definition of radians(x) function. + * Converts x from degrees to radians + * The supported signature of radians function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver radians() { + FunctionName functionName = BuiltinFunctionName.RADIANS.getName(); + return new FunctionResolver( + functionName, singleArgumentFunction(functionName, Math::toRadians)); + } + + /** + * Definition of sin(x) function. + * Calculates the sine of x, where x is given in radians + * The supported signature of sin function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver sin() { + FunctionName functionName = BuiltinFunctionName.SIN.getName(); + return new FunctionResolver(functionName, singleArgumentFunction(functionName, Math::sin)); + } + + /** + * Definition of tan(x) function. + * Calculates the tangent of x, where x is given in radians + * The supported signature of tan function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + */ + private static FunctionResolver tan() { + FunctionName functionName = BuiltinFunctionName.TAN.getName(); + return new FunctionResolver(functionName, singleArgumentFunction(functionName, Math::tan)); + } + + /** + * Util method to generate single argument function bundles. 
Applicable for INTEGER -> INTEGER + * LONG -> LONG FLOAT -> FLOAT DOUBLE -> DOUBLE + */ + private static Map singleArgumentFunction( + FunctionName functionName, + Function integerFunc, + Function longFunc, + Function floatFunc, + Function doubleFunc) { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + builder.put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.INTEGER)), + unaryOperator( + functionName, integerFunc, ExprValueUtils::getIntegerValue, ExprCoreType.INTEGER)); + builder.put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.LONG)), + unaryOperator(functionName, longFunc, ExprValueUtils::getLongValue, ExprCoreType.LONG)); + builder.put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.FLOAT)), + unaryOperator(functionName, floatFunc, ExprValueUtils::getFloatValue, ExprCoreType.FLOAT)); + builder.put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, doubleFunc, ExprValueUtils::getDoubleValue, ExprCoreType.DOUBLE)); + return builder.build(); + } + + /** Util method to generate single argument function bundles. 
Applicable for DOUBLE -> DOUBLE */ + private static Map singleArgumentFunction( + FunctionName functionName, Function doubleFunc) { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + return builder + .put( + new FunctionSignature(functionName, Arrays.asList(ExprCoreType.DOUBLE)), + unaryOperator( + functionName, doubleFunc, ExprValueUtils::getDoubleValue, ExprCoreType.DOUBLE)) + .build(); + } + + private static Map doubleArgumentsFunction( + FunctionName functionName, + BiFunction intFunc, + BiFunction longFunc, + BiFunction floatFunc, + BiFunction doubleFunc) { + return new ImmutableMap.Builder() + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.INTEGER, ExprCoreType.INTEGER)), + doubleArgFunc( + functionName, intFunc, ExprValueUtils::getIntegerValue, + ExprValueUtils::getIntegerValue, ExprCoreType.INTEGER)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.LONG, ExprCoreType.LONG)), + doubleArgFunc( + functionName, longFunc, ExprValueUtils::getLongValue, + ExprValueUtils::getLongValue, ExprCoreType.LONG)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.FLOAT, ExprCoreType.FLOAT)), + doubleArgFunc( + functionName, floatFunc, ExprValueUtils::getFloatValue, + ExprValueUtils::getFloatValue, ExprCoreType.FLOAT)) + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.DOUBLE)), + doubleArgFunc( + functionName, doubleFunc, ExprValueUtils::getDoubleValue, + ExprValueUtils::getDoubleValue, ExprCoreType.DOUBLE)) + .build(); + } + + private static Map doubleArgumentsFunction( + FunctionName functionName, + BiFunction doubleFunc) { + return new ImmutableMap.Builder() + .put( + new FunctionSignature( + functionName, Arrays.asList(ExprCoreType.DOUBLE, ExprCoreType.DOUBLE)), + doubleArgFunc( + functionName, doubleFunc, ExprValueUtils::getDoubleValue, + ExprValueUtils::getDoubleValue, ExprCoreType.DOUBLE)).build(); + } +} diff --git 
a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunction.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunction.java deleted file mode 100644 index b6f0364570..0000000000 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunction.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- */ - -package com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic; - -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; -import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.unaryOperator; - -import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; -import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; -import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionBuilder; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; -import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionSignature; -import com.google.common.collect.ImmutableMap; -import java.util.Arrays; -import java.util.Map; -import java.util.function.Function; -import lombok.experimental.UtilityClass; - -@UtilityClass -public class UnaryFunction { - - public static void register(BuiltinFunctionRepository repository) { - repository.register(abs()); - } - - /** - * Definition of abs() function. 
- * The supported signature of abs() function are - * INT -> INT - * LONG -> LONG - * FLOAT -> FLOAT - * DOUBLE -> DOUBLE - */ - private static FunctionResolver abs() { - return new FunctionResolver( - BuiltinFunctionName.ABS.getName(), - unaryFunction( - BuiltinFunctionName.ABS.getName(), Math::abs, Math::abs, Math::abs, Math::abs)); - } - - private static Map unaryFunction( - FunctionName functionName, - Function integerFunc, - Function longFunc, - Function floatFunc, - Function doubleFunc) { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - builder.put( - new FunctionSignature(functionName, Arrays.asList(INTEGER)), - unaryOperator( - functionName, integerFunc, ExprValueUtils::getIntegerValue, INTEGER)); - builder.put( - new FunctionSignature(functionName, Arrays.asList(LONG)), - unaryOperator(functionName, longFunc, ExprValueUtils::getLongValue, LONG)); - builder.put( - new FunctionSignature(functionName, Arrays.asList(FLOAT)), - unaryOperator(functionName, floatFunc, ExprValueUtils::getFloatValue, FLOAT)); - builder.put( - new FunctionSignature(functionName, Arrays.asList(DOUBLE)), - unaryOperator(functionName, doubleFunc, ExprValueUtils::getDoubleValue, DOUBLE)); - return builder.build(); - } -} diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java index d843fdc200..edfd60c153 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperator.java @@ -19,17 +19,16 @@ import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; import 
static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.ARRAY; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; import static com.amazon.opendistroforelasticsearch.sql.expression.operator.OperatorUtils.binaryOperator; import static com.amazon.opendistroforelasticsearch.sql.utils.OperatorUtils.matches; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; @@ -40,6 +39,7 @@ import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionBuilder; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionSignature; @@ -47,10 +47,9 @@ import com.google.common.collect.ImmutableTable; import 
com.google.common.collect.Table; import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.function.BiFunction; -import java.util.function.Function; +import java.util.stream.Collectors; import lombok.experimental.UtilityClass; /** @@ -164,38 +163,6 @@ public static void register(BuiltinFunctionRepository repository) { .put(LITERAL_MISSING, LITERAL_MISSING, LITERAL_MISSING) .build(); - /** - * The equalTo logic. - * A B A == B - * NULL NULL TRUE - * NULL MISSING FALSE - * MISSING NULL FALSE - * MISSING MISSING TRUE - */ - private static Table equalTable = - new ImmutableTable.Builder() - .put(LITERAL_NULL, LITERAL_NULL, LITERAL_TRUE) - .put(LITERAL_NULL, LITERAL_MISSING, LITERAL_FALSE) - .put(LITERAL_MISSING, LITERAL_NULL, LITERAL_FALSE) - .put(LITERAL_MISSING, LITERAL_MISSING, LITERAL_TRUE) - .build(); - - /** - * The notEqualTo logic. - * A B A != B - * NULL NULL FALSE - * NULL MISSING TRUE - * MISSING NULL TRUE - * MISSING MISSING FALSE - */ - private static Table notEqualTable = - new ImmutableTable.Builder() - .put(LITERAL_NULL, LITERAL_NULL, LITERAL_FALSE) - .put(LITERAL_NULL, LITERAL_MISSING, LITERAL_TRUE) - .put(LITERAL_MISSING, LITERAL_NULL, LITERAL_TRUE) - .put(LITERAL_MISSING, LITERAL_MISSING, LITERAL_FALSE) - .build(); - private static FunctionResolver and() { FunctionName functionName = BuiltinFunctionName.AND.getName(); return FunctionResolver.builder() @@ -227,41 +194,21 @@ private static FunctionResolver xor() { } private static FunctionResolver equal() { - return new FunctionResolver( - BuiltinFunctionName.EQUAL.getName(), - predicate( - BuiltinFunctionName.EQUAL.getName(), - equalTable, - LITERAL_FALSE, - Integer::equals, - Long::equals, - Float::equals, - Double::equals, - String::equals, - Boolean::equals, - List::equals, - Map::equals - ) - ); + return FunctionDSL.define(BuiltinFunctionName.EQUAL.getName(), + ExprCoreType.coreTypes().stream() + .map(type -> FunctionDSL.impl((v1, v2) -> 
ExprBooleanValue.of(v1.equals(v2)), + BOOLEAN, type, type)) + .collect( + Collectors.toList())); } private static FunctionResolver notEqual() { - return new FunctionResolver( - BuiltinFunctionName.NOTEQUAL.getName(), - predicate( - BuiltinFunctionName.NOTEQUAL.getName(), - notEqualTable, - LITERAL_TRUE, - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2), - (v1, v2) -> ! v1.equals(v2) - ) - ); + return FunctionDSL + .define(BuiltinFunctionName.NOTEQUAL.getName(), ExprCoreType.coreTypes().stream() + .map(type -> FunctionDSL + .impl((v1, v2) -> ExprBooleanValue.of(!v1.equals(v2)), BOOLEAN, type, type)) + .collect( + Collectors.toList())); } private static FunctionResolver less() { @@ -330,52 +277,6 @@ private static FunctionResolver like() { ); } - /** - * Util method to generate EQUAL/NOT EQUAL operation bundles. - * Applicable for integer, long, float, double, string types of operands - * {@param defaultValue} Default value for one missing/null operand - */ - private static Map predicate( - FunctionName functionName, - Table table, - ExprValue defaultValue, - BiFunction integerFunc, - BiFunction longFunc, - BiFunction floatFunc, - BiFunction doubleFunc, - BiFunction stringFunc, - BiFunction booleanFunc, - BiFunction listFunc, - BiFunction mapFunc) { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - return builder - .put(new FunctionSignature(functionName, Arrays.asList(INTEGER, INTEGER)), - equalPredicate(functionName, table, integerFunc, ExprValueUtils::getIntegerValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(LONG, LONG)), - equalPredicate(functionName, table, longFunc, ExprValueUtils::getLongValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(FLOAT, FLOAT)), - equalPredicate(functionName, table, floatFunc, 
ExprValueUtils::getFloatValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(DOUBLE, DOUBLE)), - equalPredicate(functionName, table, doubleFunc, ExprValueUtils::getDoubleValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(STRING, STRING)), - equalPredicate(functionName, table, stringFunc, ExprValueUtils::getStringValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(BOOLEAN, BOOLEAN)), - equalPredicate(functionName, table, booleanFunc, ExprValueUtils::getBooleanValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(ARRAY, ARRAY)), - equalPredicate(functionName, table, listFunc, ExprValueUtils::getCollectionValue, - defaultValue, BOOLEAN)) - .put(new FunctionSignature(functionName, Arrays.asList(STRUCT, STRUCT)), - equalPredicate(functionName, table, mapFunc, ExprValueUtils::getTupleValue, - defaultValue, BOOLEAN)) - .build(); - } - /** * Util method to generate binary predicate bundles. * Applicable for integer, long, float, double, string types of operands @@ -461,43 +362,4 @@ public String toString() { } }; } - - /** - * Building method for equalTo and notEqualTo operators. 
- * - * @param defaultValue the return value when expr value is missing/null - */ - private static FunctionBuilder equalPredicate(FunctionName functionName, - Table table, - BiFunction function, - Function observer, - ExprValue defaultValue, - ExprType returnType) { - return arguments -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment env) { - ExprValue arg1 = arguments.get(0).valueOf(env); - ExprValue arg2 = arguments.get(1).valueOf(env); - if (table.contains(arg1, arg2)) { - return table.get(arg1, arg2); - } else if (arg1.isMissing() || arg1.isNull() || arg2.isMissing() || arg2.isNull()) { - return defaultValue; - } else { - return ExprValueUtils.fromObjectValue( - function.apply(observer.apply(arg1), observer.apply(arg2))); - } - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s %s %s", arguments.get(0).toString(), functionName, arguments - .get(1).toString()); - } - }; - } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java index b38bc78377..e3eb0b043c 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/config/TestConfig.java @@ -40,6 +40,8 @@ public class TestConfig { public static final String INT_TYPE_NULL_VALUE_FIELD = "int_null_value"; public static final String INT_TYPE_MISSING_VALUE_FIELD = "int_missing_value"; + public static final String DOUBLE_TYPE_NULL_VALUE_FIELD = "double_null_value"; + public static final String DOUBLE_TYPE_MISSING_VALUE_FIELD = "double_missing_value"; public static final String BOOL_TYPE_NULL_VALUE_FIELD = "null_value_boolean"; public static final String BOOL_TYPE_MISSING_VALUE_FIELD = "missing_value_boolean"; public static final String 
STRING_TYPE_NULL_VALUE_FILED = "string_null_value"; @@ -52,6 +54,8 @@ public class TestConfig { .put("long_value", ExprCoreType.LONG) .put("float_value", ExprCoreType.FLOAT) .put("double_value", ExprCoreType.DOUBLE) + .put(DOUBLE_TYPE_NULL_VALUE_FIELD, ExprCoreType.DOUBLE) + .put(DOUBLE_TYPE_MISSING_VALUE_FIELD, ExprCoreType.DOUBLE) .put("boolean_value", ExprCoreType.BOOLEAN) .put(BOOL_TYPE_NULL_VALUE_FIELD, ExprCoreType.BOOLEAN) .put(BOOL_TYPE_MISSING_VALUE_FIELD, ExprCoreType.BOOLEAN) diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java new file mode 100644 index 0000000000..be4ad7f7fc --- /dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java @@ -0,0 +1,94 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.integerValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIME; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; +import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import org.junit.jupiter.api.Test; + +public class DateTimeValueTest { + + @Test + public void timeValueInterfaceTest() { + ExprValue timeValue = new ExprTimeValue("01:01:01"); + + assertEquals(TIME, timeValue.type()); + assertEquals(LocalTime.parse("01:01:01"), timeValue.timeValue()); + assertEquals("01:01:01", timeValue.value()); + assertEquals("TIME '01:01:01'", timeValue.toString()); + } + + @Test + public void timestampValueInterfaceTest() { + ExprValue timestampValue = new ExprTimestampValue("2020-07-07 01:01:01"); + + assertEquals(TIMESTAMP, timestampValue.type()); + assertEquals(Instant.ofEpochSecond(1594083661), timestampValue.timestampValue()); + assertEquals("2020-07-07 01:01:01", timestampValue.value()); + assertEquals("TIMESTAMP '2020-07-07 01:01:01'", timestampValue.toString()); + } + + @Test + public void dateValueInterfaceTest() { + ExprValue dateValue = new ExprDateValue("2012-07-07"); + + assertEquals(LocalDate.parse("2012-07-07").atStartOfDay(ZoneId.of("UTC")), + dateValue.dateValue()); + ExpressionEvaluationException exception = + assertThrows(ExpressionEvaluationException.class, + () -> ExprValueUtils.getDateValue(integerValue(1))); + assertEquals("invalid to get dateValue from value of type 
INTEGER", + exception.getMessage()); + } + + @Test + public void dateInUnsupportedFormat() { + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, () -> new ExprDateValue("2020-07-07Z")); + assertEquals("date:2020-07-07Z in unsupported format, please use yyyy-MM-dd", + exception.getMessage()); + } + + @Test + public void timeInUnsupportedFormat() { + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, () -> new ExprTimeValue("01:01:0")); + assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss", + exception.getMessage()); + } + + @Test + public void timestampInUnsupportedFormat() { + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, + () -> new ExprTimestampValue("2020-07-07T01:01:01Z")); + assertEquals( + "timestamp:2020-07-07T01:01:01Z in unsupported format, please use yyyy-MM-dd HH:mm:ss", + exception.getMessage()); + } +} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValueTest.java index 64e932a0fe..83085ac1d5 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprCollectionValueTest.java @@ -17,13 +17,43 @@ import static com.amazon.opendistroforelasticsearch.sql.utils.ComparisonUtil.compare; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import java.util.Arrays; import 
org.junit.jupiter.api.Test; public class ExprCollectionValueTest { + @Test + public void equal_to_itself() { + ExprValue value = ExprValueUtils.collectionValue(ImmutableList.of(1)); + assertTrue(value.equals(value)); + } + + @Test + public void collection_compare_int() { + ExprValue intValue = ExprValueUtils.integerValue(10); + ExprValue value = ExprValueUtils.collectionValue(ImmutableList.of(1)); + assertFalse(value.equals(intValue)); + } + + @Test + public void compare_collection_with_different_size() { + ExprValue value1 = ExprValueUtils.collectionValue(ImmutableList.of(1)); + ExprValue value2 = ExprValueUtils.collectionValue(ImmutableList.of(1, 2)); + assertFalse(value1.equals(value2)); + assertFalse(value2.equals(value1)); + } + + @Test + public void compare_collection_with_int_object() { + ExprValue value = ExprValueUtils.collectionValue(ImmutableList.of(1)); + assertFalse(value.equals(1)); + } @Test public void comparabilityTest() { diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java index fcfb072b90..cd79cc13d5 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValueTest.java @@ -40,7 +40,7 @@ public void tuple_compare_int() { } @Test - public void compre_tuple_with_different_size() { + public void compare_tuple_with_different_size() { ExprValue tupleValue1 = ExprValueUtils.tupleValue(ImmutableMap.of("integer_value", 2)); ExprValue tupleValue2 = ExprValueUtils.tupleValue(ImmutableMap.of("integer_value", 2, "float_value", 1f)); diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java new file mode 100644 index 
0000000000..e5d6d877b3 --- /dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueCompareTest.java @@ -0,0 +1,83 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.data.model; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +public class ExprValueCompareTest { + + @Test + public void timeValueCompare() { + assertEquals(0, new ExprTimeValue("18:00:00").compareTo(new ExprTimeValue("18:00:00"))); + assertEquals(1, new ExprTimeValue("19:00:00").compareTo(new ExprTimeValue("18:00:00"))); + assertEquals(-1, new ExprTimeValue("18:00:00").compareTo(new ExprTimeValue("19:00:00"))); + } + + @Test + public void dateValueCompare() { + assertEquals(0, new ExprDateValue("2012-08-07").compareTo(new ExprDateValue("2012-08-07"))); + assertEquals(1, new ExprDateValue("2012-08-08").compareTo(new ExprDateValue("2012-08-07"))); + assertEquals(-1, new ExprDateValue("2012-08-07").compareTo(new ExprDateValue("2012-08-08"))); + } + + @Test + public void timestampValueCompare() { + assertEquals(0, + new ExprTimestampValue("2012-08-07 18:00:00") + .compareTo(new ExprTimestampValue("2012-08-07 18:00:00"))); + assertEquals(1, + new ExprTimestampValue("2012-08-07 19:00:00") + .compareTo(new ExprTimestampValue("2012-08-07 18:00:00"))); + assertEquals(-1, + new ExprTimestampValue("2012-08-07 
18:00:00") + .compareTo(new ExprTimestampValue("2012-08-07 19:00:00"))); + } + + @Test + public void nullValueEqualToNullValue() { + assertEquals(0, ExprNullValue.of().compareTo(ExprNullValue.of())); + } + + @Test + public void nullValueLessThanNotNullValue() { + assertEquals(-1, ExprNullValue.of().compareTo(ExprBooleanValue.of(true))); + assertEquals(1, ExprBooleanValue.of(true).compareTo(ExprNullValue.of())); + } + + @Test + public void missingValueEqualToMissingValue() { + assertEquals(0, ExprMissingValue.of().compareTo(ExprMissingValue.of())); + } + + @Test + public void missingValueLessThanNotMissingValue() { + assertEquals(-1, ExprMissingValue.of().compareTo(ExprBooleanValue.of(true))); + assertEquals(1, ExprBooleanValue.of(true).compareTo(ExprMissingValue.of())); + + assertEquals(-1, ExprMissingValue.of().compareTo(ExprNullValue.of())); + assertEquals(1, ExprNullValue.of().compareTo(ExprMissingValue.of())); + } + + @Test + public void missingValueLessThanNullValue() { + assertEquals(-1, ExprMissingValue.of().compareTo(ExprNullValue.of())); + assertEquals(1, ExprNullValue.of().compareTo(ExprMissingValue.of())); + } +} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtilsTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtilsTest.java index d0814ff672..cc9334602e 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtilsTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValueUtilsTest.java @@ -16,24 +16,36 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.integerValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DATE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIME; +import static 
com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.amazon.opendistroforelasticsearch.sql.storage.bindingtuple.BindingTuple; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.hamcrest.Matchers; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -42,10 +54,25 @@ @DisplayName("Test Expression Value Utils") public class ExprValueUtilsTest { + private static LinkedHashMap testTuple = new LinkedHashMap<>(); + + static { + testTuple.put("1", new ExprIntegerValue(1)); + } + private static List numberValues = Stream.of(1, 1L, 1f, 1D) .map(ExprValueUtils::fromObjectValue).collect(Collectors.toList()); - private static List nonNumberValues = Stream.of("1", true, Arrays.asList(1), - ImmutableMap.of("1", 1)).map(ExprValueUtils::fromObjectValue).collect(Collectors.toList()); + + private static List nonNumberValues = Arrays.asList( + new ExprStringValue("1"), + ExprBooleanValue.of(true), + new 
ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))), + new ExprTupleValue(testTuple), + new ExprDateValue("2012-08-07"), + new ExprTimeValue("18:00:00"), + new ExprTimestampValue("2012-08-07 18:00:00") + ); + private static List allValues = Lists.newArrayList(Iterables.concat(numberValues, nonNumberValues)); @@ -58,7 +85,10 @@ public class ExprValueUtilsTest { ExprValueUtils::getStringValue, ExprValueUtils::getBooleanValue, ExprValueUtils::getCollectionValue, - ExprValueUtils::getTupleValue); + ExprValueUtils::getTupleValue, + ExprValue::dateValue, + ExprValue::timeValue, + ExprValue::timestampValue); private static List> allValueExtractor = Lists.newArrayList( Iterables.concat(numberValueExtractor, nonNumberValueExtractor)); @@ -67,14 +97,18 @@ public class ExprValueUtilsTest { ExprCoreType.DOUBLE); private static List nonNumberTypes = Arrays.asList(ExprCoreType.STRING, ExprCoreType.BOOLEAN, ExprCoreType.ARRAY, - ExprCoreType.STRUCT); + ExprCoreType.STRUCT, DATE, TIME, TIMESTAMP); private static List allTypes = Lists.newArrayList(Iterables.concat(numberTypes, nonNumberTypes)); private static Stream getValueTestArgumentStream() { List expectedValues = Arrays.asList(1, 1L, 1f, 1D, "1", true, Arrays.asList(integerValue(1)), - ImmutableMap.of("1", integerValue(1))); + ImmutableMap.of("1", integerValue(1)), + LocalDate.parse("2012-08-07").atStartOfDay(ZoneId.of("UTC")), + LocalTime.parse("18:00:00"), + Instant.ofEpochSecond(1344362400) + ); Stream.Builder builder = Stream.builder(); for (int i = 0; i < expectedValues.size(); i++) { builder.add(Arguments.of( @@ -148,9 +182,7 @@ public void getType(ExprValue value, ExprCoreType expectType) { public void invalidGetNumberValue(ExprValue value, Function extractor) { Exception exception = assertThrows(ExpressionEvaluationException.class, () -> extractor.apply(value)); - assertEquals( - String.format("invalid to getNumberValue with expression has type of %s", value.type()), - exception.getMessage()); + 
assertThat(exception.getMessage(), Matchers.containsString("invalid")); } /** @@ -162,9 +194,7 @@ public void invalidConvertExprValue(ExprValue value, Function ExprCoreType toType) { Exception exception = assertThrows(ExpressionEvaluationException.class, () -> extractor.apply(value)); - assertEquals(String - .format("invalid to convert expression with type:%s to type:%s", value.type(), toType), - exception.getMessage()); + assertThat(exception.getMessage(), Matchers.containsString("invalid")); } @Test @@ -187,4 +217,30 @@ public void bindingTuples() { } } } + + @Test + public void constructDateAndTimeValue() { + assertEquals(new ExprDateValue("2012-07-07"), + ExprValueUtils.fromObjectValue("2012-07-07", DATE)); + assertEquals(new ExprTimeValue("01:01:01"), + ExprValueUtils.fromObjectValue("01:01:01", TIME)); + assertEquals(new ExprTimestampValue("2012-07-07 01:01:01"), + ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", TIMESTAMP)); + } + + @Test + public void hashCodeTest() { + assertEquals(new ExprIntegerValue(1).hashCode(), new ExprIntegerValue(1).hashCode()); + assertEquals(new ExprStringValue("1").hashCode(), new ExprStringValue("1").hashCode()); + assertEquals(new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode(), + new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode()); + assertEquals(new ExprTupleValue(testTuple).hashCode(), + new ExprTupleValue(testTuple).hashCode()); + assertEquals(new ExprDateValue("2012-08-07").hashCode(), + new ExprDateValue("2012-08-07").hashCode()); + assertEquals(new ExprTimeValue("18:00:00").hashCode(), + new ExprTimeValue("18:00:00").hashCode()); + assertEquals(new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), + new ExprTimestampValue("2012-08-07 18:00:00").hashCode()); + } } \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java 
b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java index d2ac42afc4..755501f86c 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/utils/ExprValueOrderingTest.java @@ -205,43 +205,4 @@ public void order_compare_value_with_different_type() { "compare expected value have same type, but with [INTEGER, DOUBLE]", exception.getMessage()); } - - @Test - public void order_compare_value_with_null_value() { - ExprValueOrdering ordering = ExprValueOrdering.natural(); - ExpressionEvaluationException exception = - assertThrows( - ExpressionEvaluationException.class, - () -> ordering.compare(integerValue(1), LITERAL_NULL)); - assertEquals("compare with null or missing value is invalid", exception.getMessage()); - - exception = - assertThrows( - ExpressionEvaluationException.class, - () -> ordering.compare(integerValue(1), LITERAL_MISSING)); - assertEquals("compare with null or missing value is invalid", exception.getMessage()); - - exception = - assertThrows( - ExpressionEvaluationException.class, - () -> ordering.compare(LITERAL_NULL, integerValue(1))); - assertEquals("compare with null or missing value is invalid", exception.getMessage()); - - exception = - assertThrows( - ExpressionEvaluationException.class, - () -> ordering.compare(LITERAL_MISSING, integerValue(1))); - assertEquals("compare with null or missing value is invalid", exception.getMessage()); - } - - @Test - public void order_compare_unknown_type() { - when(left.type()).thenReturn(ExprCoreType.UNKNOWN); - when(right.type()).thenReturn(ExprCoreType.UNKNOWN); - - ExprValueOrdering ordering = ExprValueOrdering.natural(); - ExpressionEvaluationException exception = - assertThrows(ExpressionEvaluationException.class, () -> ordering.compare(left, right)); - assertEquals("compare doesn't support type [UNKNOWN]", exception.getMessage()); - } 
} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java index bfda027f26..1caeba23cc 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java @@ -17,6 +17,8 @@ import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.BOOL_TYPE_MISSING_VALUE_FIELD; import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.BOOL_TYPE_NULL_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.DOUBLE_TYPE_MISSING_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.DOUBLE_TYPE_NULL_VALUE_FIELD; import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.INT_TYPE_MISSING_VALUE_FIELD; import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.INT_TYPE_NULL_VALUE_FIELD; import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.STRING_TYPE_MISSING_VALUE_FILED; @@ -83,10 +85,12 @@ protected Environment valueEnv() { return collectionValue(ImmutableList.of(1)); case BOOL_TYPE_NULL_VALUE_FIELD: case INT_TYPE_NULL_VALUE_FIELD: + case DOUBLE_TYPE_NULL_VALUE_FIELD: case STRING_TYPE_NULL_VALUE_FILED: return nullValue(); case INT_TYPE_MISSING_VALUE_FIELD: case BOOL_TYPE_MISSING_VALUE_FIELD: + case DOUBLE_TYPE_MISSING_VALUE_FIELD: case STRING_TYPE_MISSING_VALUE_FILED: return missingValue(); default: diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java new file mode 100644 index 0000000000..32e4f9b257 --- /dev/null +++ 
b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java @@ -0,0 +1,74 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.expression.datetime; + +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.integerValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.missingValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.nullValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DATE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprDateValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; +import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionTestBase; +import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class DateTimeFunctionTest extends ExpressionTestBase { + @Mock + Environment env; + + @Mock + Expression nullRef; + + @Mock + Expression missingRef; + + @BeforeEach + public void setup() { + when(nullRef.valueOf(env)).thenReturn(nullValue()); + when(missingRef.valueOf(env)).thenReturn(missingValue()); + } + + @Test + public void dayOfMonth() { + when(nullRef.type()).thenReturn(DATE); + when(missingRef.type()).thenReturn(DATE); + + FunctionExpression expression = dsl.dayofmonth(DSL.literal(new ExprDateValue("2020-07-07"))); + assertEquals(INTEGER, expression.type()); + assertEquals("dayofmonth(DATE '2020-07-07')", expression.toString()); + assertEquals(integerValue(7), eval(expression)); + assertEquals(nullValue(), eval(dsl.dayofmonth(nullRef))); + assertEquals(missingValue(), eval(dsl.dayofmonth(missingRef))); + } + + private ExprValue eval(Expression expression) { + return expression.valueOf(env); + } +} \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionNameTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionNameTest.java index 6feeb831b4..f73ec06812 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionNameTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionNameTest.java @@ -20,6 +20,7 @@ import java.util.Arrays; import java.util.stream.Stream; +import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -39,4 +40,10 @@ public void of(String name, BuiltinFunctionName expected) { 
assertTrue(BuiltinFunctionName.of(name).isPresent()); assertEquals(expected, BuiltinFunctionName.of(name).get()); } + + @Test + public void caseInsensitive() { + assertTrue(BuiltinFunctionName.of("aBs").isPresent()); + assertEquals(BuiltinFunctionName.of("aBs").get(), BuiltinFunctionName.ABS); + } } \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java index 002d2f1b19..877042ef32 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/ArithmeticFunctionTest.java @@ -24,6 +24,7 @@ import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.literal; import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; @@ -169,6 +170,12 @@ public void divide(ExprValue op1, ExprValue op2) { assertEquals(expectedType, expression.type()); assertValueEqual(BuiltinFunctionName.DIVIDE, expectedType, op1, op2, expression.valueOf(null)); assertEquals(String.format("%s / %s", op1.toString(), op2.toString()), expression.toString()); + + expression = dsl.divide(literal(op1), literal(0)); + expectedType = WideningTypeRule.max(op1.type(), INTEGER); + assertEquals(expectedType, expression.type()); + assertTrue(expression.valueOf(valueEnv()).isNull()); + assertEquals(String.format("%s / 0", op1.toString()), expression.toString()); } @ParameterizedTest(name = "module({1}, {2})") @@ -179,6 +186,12 @@ public void 
module(ExprValue op1, ExprValue op2) { assertEquals(expectedType, expression.type()); assertValueEqual(BuiltinFunctionName.MODULES, expectedType, op1, op2, expression.valueOf(null)); assertEquals(op1.toString() + " % " + op2.toString(), expression.toString()); + + expression = dsl.module(literal(op1), literal(0)); + expectedType = WideningTypeRule.max(op1.type(), INTEGER); + assertEquals(expectedType, expression.type()); + assertTrue(expression.valueOf(valueEnv()).isNull()); + assertEquals(op1.toString() + " % 0", expression.toString()); } protected void assertValueEqual(BuiltinFunctionName builtinFunctionName, ExprType type, diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java new file mode 100644 index 0000000000..bf1cf97ead --- /dev/null +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/MathematicalFunctionTest.java @@ -0,0 +1,2243 @@ +/* + * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic; + +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.DOUBLE_TYPE_MISSING_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.DOUBLE_TYPE_NULL_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.INT_TYPE_MISSING_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.INT_TYPE_NULL_VALUE_FIELD; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.STRING_TYPE_MISSING_VALUE_FILED; +import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.STRING_TYPE_NULL_VALUE_FILED; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getDoubleValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.getFloatValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.utils.MatcherUtils.hasType; +import static com.amazon.opendistroforelasticsearch.sql.utils.MatcherUtils.hasValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import 
com.amazon.opendistroforelasticsearch.sql.expression.ExpressionTestBase; +import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.Random; +import java.util.stream.Stream; +import java.util.zip.CRC32; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class MathematicalFunctionTest extends ExpressionTestBase { + private static Stream testLogIntegerArguments() { + Stream.Builder builder = Stream.builder(); + return builder.add(Arguments.of(2, 2)).build(); + } + + private static Stream testLogLongArguments() { + Stream.Builder builder = Stream.builder(); + return builder.add(Arguments.of(2L, 2L)).build(); + } + + private static Stream testLogFloatArguments() { + Stream.Builder builder = Stream.builder(); + return builder.add(Arguments.of(2F, 2F)).build(); + } + + private static Stream testLogDoubleArguments() { + Stream.Builder builder = Stream.builder(); + return builder.add(Arguments.of(2D, 2D)).build(); + } + + private static Stream trigonometricArguments() { + Stream.Builder builder = Stream.builder(); + return builder + .add(Arguments.of(1)).add(Arguments.of(1L)).add(Arguments.of(1F)).add(Arguments.of(1D)) + .build(); + } + + private static Stream trigonometricDoubleArguments() { + Stream.Builder builder = Stream.builder(); + return builder + .add(Arguments.of(1, 2)).add(Arguments.of(1L, 2L)).add(Arguments.of(1F, 2F)) + .add(Arguments.of(1D, 2D)).build(); + } + + /** + * Test abs with integer value. 
+ */ + @ParameterizedTest(name = "abs({0})") + @ValueSource(ints = {-2, 2}) + public void abs_int_value(Integer value) { + FunctionExpression abs = dsl.abs(DSL.literal(value)); + assertThat( + abs.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue(Math.abs(value)))); + assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); + } + + /** + * Test abs with long value. + */ + @ParameterizedTest(name = "abs({0})") + @ValueSource(longs = {-2L, 2L}) + public void abs_long_value(Long value) { + FunctionExpression abs = dsl.abs(DSL.literal(value)); + assertThat( + abs.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue(Math.abs(value)))); + assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); + } + + /** + * Test abs with float value. + */ + @ParameterizedTest(name = "abs({0})") + @ValueSource(floats = {-2f, 2f}) + public void abs_float_value(Float value) { + FunctionExpression abs = dsl.abs(DSL.literal(value)); + assertThat( + abs.valueOf(valueEnv()), + allOf(hasType(FLOAT), hasValue(Math.abs(value)))); + assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); + } + + /** + * Test abs with double value. + */ + @ParameterizedTest(name = "abs({0})") + @ValueSource(doubles = {-2L, 2L}) + public void abs_double_value(Double value) { + FunctionExpression abs = dsl.abs(DSL.literal(value)); + assertThat( + abs.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.abs(value)))); + assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); + } + + @Test + public void abs_null_value() { + assertTrue(dsl.abs(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)).valueOf(valueEnv()).isNull()); + } + + @Test + public void abs_missing_value() { + assertTrue( + dsl.abs(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)).valueOf(valueEnv()).isMissing()); + } + + /** + * Test ceil/ceiling with integer value. 
+ */ + @ParameterizedTest(name = "ceil({0})") + @ValueSource(ints = {2, -2}) + public void ceil_int_value(Integer value) { + FunctionExpression ceil = dsl.ceil(DSL.literal(value)); + assertThat( + ceil.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); + + FunctionExpression ceiling = dsl.ceiling(DSL.literal(value)); + assertThat( + ceiling.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); + } + + /** + * Test ceil/ceiling with long value. + */ + @ParameterizedTest(name = "ceil({0})") + @ValueSource(longs = {2L, -2L}) + public void ceil_long_value(Long value) { + FunctionExpression ceil = dsl.ceil(DSL.literal(value)); + assertThat( + ceil.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); + + FunctionExpression ceiling = dsl.ceiling(DSL.literal(value)); + assertThat( + ceiling.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); + } + + /** + * Test ceil/ceiling with float value. 
+ */ + @ParameterizedTest(name = "ceil({0})") + @ValueSource(floats = {2F, -2F}) + public void ceil_float_value(Float value) { + FunctionExpression ceil = dsl.ceil(DSL.literal(value)); + assertThat( + ceil.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); + + FunctionExpression ceiling = dsl.ceiling(DSL.literal(value)); + assertThat( + ceiling.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); + } + + /** + * Test ceil/ceiling with double value. + */ + @ParameterizedTest(name = "ceil({0})") + @ValueSource(doubles = {-2L, 2L}) + public void ceil_double_value(Double value) { + FunctionExpression ceil = dsl.ceil(DSL.literal(value)); + assertThat( + ceil.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceil(%s)", value.toString()), ceil.toString()); + + FunctionExpression ceiling = dsl.ceiling(DSL.literal(value)); + assertThat( + ceiling.valueOf(valueEnv()), allOf(hasType(INTEGER), hasValue((int) Math.ceil(value)))); + assertEquals(String.format("ceiling(%s)", value.toString()), ceiling.toString()); + } + + /** + * Test ceil/ceiling with null value. + */ + @Test + public void ceil_null_value() { + FunctionExpression ceil = dsl.ceil(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(INTEGER, ceil.type()); + assertTrue(ceil.valueOf(valueEnv()).isNull()); + + FunctionExpression ceiling = dsl.ceiling(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(INTEGER, ceiling.type()); + assertTrue(ceiling.valueOf(valueEnv()).isNull()); + } + + /** + * Test ceil/ceiling with missing value. 
+ */ + @Test + public void ceil_missing_value() { + FunctionExpression ceil = dsl.ceil(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(INTEGER, ceil.type()); + assertTrue(ceil.valueOf(valueEnv()).isMissing()); + + FunctionExpression ceiling = dsl.ceiling(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(INTEGER, ceiling.type()); + assertTrue(ceiling.valueOf(valueEnv()).isMissing()); + } + + /** + * Test conv from decimal base with string as a number. + */ + @ParameterizedTest(name = "conv({0})") + @ValueSource(strings = {"1", "0", "-1"}) + public void conv_from_decimal(String value) { + FunctionExpression conv = dsl.conv(DSL.literal(value), DSL.literal(10), DSL.literal(2)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value), 2)))); + assertEquals(String.format("conv(\"%s\", 10, 2)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(10), DSL.literal(8)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value), 8)))); + assertEquals(String.format("conv(\"%s\", 10, 8)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(10), DSL.literal(16)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value), 16)))); + assertEquals(String.format("conv(\"%s\", 10, 16)", value), conv.toString()); + } + + /** + * Test conv from decimal base with integer as a number. 
+ */ + @ParameterizedTest(name = "conv({0})") + @ValueSource(ints = {1, 0, -1}) + public void conv_from_decimal(Integer value) { + FunctionExpression conv = dsl.conv(DSL.literal(value), DSL.literal(10), DSL.literal(2)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(value, 2)))); + assertEquals(String.format("conv(%s, 10, 2)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(10), DSL.literal(8)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(value, 8)))); + assertEquals(String.format("conv(%s, 10, 8)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(10), DSL.literal(16)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(value, 16)))); + assertEquals(String.format("conv(%s, 10, 16)", value), conv.toString()); + } + + /** + * Test conv to decimal base with string as a number. + */ + @ParameterizedTest(name = "conv({0})") + @ValueSource(strings = {"11", "0", "11111"}) + public void conv_to_decimal(String value) { + FunctionExpression conv = dsl.conv(DSL.literal(value), DSL.literal(2), DSL.literal(10)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value, 2))))); + assertEquals(String.format("conv(\"%s\", 2, 10)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(8), DSL.literal(10)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value, 8))))); + assertEquals(String.format("conv(\"%s\", 8, 10)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(16), DSL.literal(10)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value, 16))))); + assertEquals(String.format("conv(\"%s\", 16, 10)", value), conv.toString()); + } + + /** + * Test conv to 
decimal base with integer as a number. + */ + @ParameterizedTest(name = "conv({0})") + @ValueSource(ints = {11, 0, 11111}) + public void conv_to_decimal(Integer value) { + FunctionExpression conv = dsl.conv(DSL.literal(value), DSL.literal(2), DSL.literal(10)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value.toString(), 2))))); + assertEquals(String.format("conv(%s, 2, 10)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(8), DSL.literal(10)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value.toString(), 8))))); + assertEquals(String.format("conv(%s, 8, 10)", value), conv.toString()); + + conv = dsl.conv(DSL.literal(value), DSL.literal(16), DSL.literal(10)); + assertThat( + conv.valueOf(valueEnv()), + allOf(hasType(STRING), hasValue(Integer.toString(Integer.parseInt(value.toString(), 16))))); + assertEquals(String.format("conv(%s, 16, 10)", value), conv.toString()); + } + + /** + * Test conv with null value. + */ + @Test + public void conv_null_value() { + FunctionExpression conv = dsl.conv( + DSL.ref(STRING_TYPE_NULL_VALUE_FILED, STRING), DSL.literal(10), DSL.literal(2)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isNull()); + + conv = dsl.conv( + DSL.literal("1"), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.literal(2)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isNull()); + + conv = dsl.conv( + DSL.literal("1"), DSL.literal(10), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isNull()); + } + + /** + * Test conv with missing value. 
+ */ + @Test + public void conv_missing_value() { + FunctionExpression conv = dsl.conv( + DSL.ref(STRING_TYPE_MISSING_VALUE_FILED, STRING), DSL.literal(10), DSL.literal(2)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isMissing()); + + conv = dsl.conv( + DSL.literal("1"), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(2)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isMissing()); + + conv = dsl.conv( + DSL.literal("1"), DSL.literal(10), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isMissing()); + } + + /** + * Test conv with null and missing values. + */ + @Test + public void conv_null_missing() { + FunctionExpression conv = dsl.conv(DSL.ref(STRING_TYPE_MISSING_VALUE_FILED, STRING), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(2)); + assertEquals(STRING, conv.type()); + assertTrue(conv.valueOf(valueEnv()).isMissing()); + } + + /** + * Test crc32 with string value. + */ + @ParameterizedTest(name = "crc({0})") + @ValueSource(strings = {"odfe", "sql"}) + public void crc32_string_value(String value) { + FunctionExpression crc = dsl.crc32(DSL.literal(value)); + CRC32 crc32 = new CRC32(); + crc32.update(value.getBytes()); + assertThat( + crc.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue(crc32.getValue()))); + assertEquals(String.format("crc32(\"%s\")", value), crc.toString()); + } + + /** + * Test crc32 with null value. + */ + @Test + public void crc32_null_value() { + FunctionExpression crc = dsl.crc32(DSL.ref(STRING_TYPE_NULL_VALUE_FILED, STRING)); + assertEquals(LONG, crc.type()); + assertTrue(crc.valueOf(valueEnv()).isNull()); + } + + /** + * Test crc32 with missing value. 
+ */ + @Test + public void crc32_missing_value() { + FunctionExpression crc = dsl.crc32(DSL.ref(STRING_TYPE_MISSING_VALUE_FILED, STRING)); + assertEquals(LONG, crc.type()); + assertTrue(crc.valueOf(valueEnv()).isMissing()); + } + + /** + * Test constant e. + */ + @Test + public void test_e() { + FunctionExpression e = dsl.euler(); + assertThat(e.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.E))); + } + + /** + * Test exp with integer value. + */ + @ParameterizedTest(name = "exp({0})") + @ValueSource(ints = {-2, 2}) + public void exp_int_value(Integer value) { + FunctionExpression exp = dsl.exp(DSL.literal(value)); + assertThat( + exp.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); + } + + /** + * Test exp with long value. + */ + @ParameterizedTest(name = "exp({0})") + @ValueSource(longs = {-2L, 2L}) + public void exp_long_value(Long value) { + FunctionExpression exp = dsl.exp(DSL.literal(value)); + assertThat( + exp.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); + } + + /** + * Test exp with float value. + */ + @ParameterizedTest(name = "exp({0})") + @ValueSource(floats = {-2F, 2F}) + public void exp_float_value(Float value) { + FunctionExpression exp = dsl.exp(DSL.literal(value)); + assertThat( + exp.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); + } + + /** + * Test exp with double value. 
+ */ + @ParameterizedTest(name = "exp({0})") + @ValueSource(doubles = {-2D, 2D}) + public void exp_double_value(Double value) { + FunctionExpression exp = dsl.exp(DSL.literal(value)); + assertThat( + exp.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.exp(value)))); + assertEquals(String.format("exp(%s)", value.toString()), exp.toString()); + } + + /** + * Test exp with null value. + */ + @Test + public void exp_null_value() { + FunctionExpression exp = dsl.exp(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, exp.type()); + assertTrue(exp.valueOf(valueEnv()).isNull()); + } + + /** + * Test exp with missing value. + */ + @Test + public void exp_missing_value() { + FunctionExpression exp = dsl.exp(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, exp.type()); + assertTrue(exp.valueOf(valueEnv()).isMissing()); + } + + /** + * Test floor with integer value. + */ + @ParameterizedTest(name = "floor({0})") + @ValueSource(ints = {-2, 2}) + public void floor_int_value(Integer value) { + FunctionExpression floor = dsl.floor(DSL.literal(value)); + assertThat( + floor.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.floor(value)))); + assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); + } + + /** + * Test floor with long value. + */ + @ParameterizedTest(name = "floor({0})") + @ValueSource(longs = {-2L, 2L}) + public void floor_long_value(Long value) { + FunctionExpression floor = dsl.floor(DSL.literal(value)); + assertThat( + floor.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.floor(value)))); + assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); + } + + /** + * Test floor with float value. 
+ */ + @ParameterizedTest(name = "floor({0})") + @ValueSource(floats = {-2F, 2F}) + public void floor_float_value(Float value) { + FunctionExpression floor = dsl.floor(DSL.literal(value)); + assertThat( + floor.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.floor(value)))); + assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); + } + + /** + * Test floor with double value. + */ + @ParameterizedTest(name = "floor({0})") + @ValueSource(doubles = {-2D, 2D}) + public void floor_double_value(Double value) { + FunctionExpression floor = dsl.floor(DSL.literal(value)); + assertThat( + floor.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.floor(value)))); + assertEquals(String.format("floor(%s)", value.toString()), floor.toString()); + } + + /** + * Test floor with null value. + */ + @Test + public void floor_null_value() { + FunctionExpression floor = dsl.floor(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(INTEGER, floor.type()); + assertTrue(floor.valueOf(valueEnv()).isNull()); + } + + /** + * Test floor with missing value. + */ + @Test + public void floor_missing_value() { + FunctionExpression floor = dsl.floor(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(INTEGER, floor.type()); + assertTrue(floor.valueOf(valueEnv()).isMissing()); + } + + /** + * Test ln with integer value. + */ + @ParameterizedTest(name = "ln({0})") + @ValueSource(ints = {2, -2}) + public void ln_int_value(Integer value) { + FunctionExpression ln = dsl.ln(DSL.literal(value)); + assertThat( + ln.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); + } + + /** + * Test ln with long value. 
+ */ + @ParameterizedTest(name = "ln({0})") + @ValueSource(longs = {2L, -2L}) + public void ln_long_value(Long value) { + FunctionExpression ln = dsl.ln(DSL.literal(value)); + assertThat( + ln.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); + } + + /** + * Test ln with float value. + */ + @ParameterizedTest(name = "ln({0})") + @ValueSource(floats = {2F, -2F}) + public void ln_float_value(Float value) { + FunctionExpression ln = dsl.ln(DSL.literal(value)); + assertThat( + ln.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); + } + + /** + * Test ln with double value. + */ + @ParameterizedTest(name = "ln({0})") + @ValueSource(doubles = {2D, -2D}) + public void ln_double_value(Double value) { + FunctionExpression ln = dsl.ln(DSL.literal(value)); + assertThat( + ln.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.log(value)))); + assertEquals(String.format("ln(%s)", value.toString()), ln.toString()); + } + + /** + * Test ln with null value. + */ + @Test + public void ln_null_value() { + FunctionExpression ln = dsl.ln(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, ln.type()); + assertTrue(ln.valueOf(valueEnv()).isNull()); + } + + /** + * Test ln with missing value. + */ + @Test + public void ln_missing_value() { + FunctionExpression ln = dsl.ln(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, ln.type()); + assertTrue(ln.valueOf(valueEnv()).isMissing()); + } + + /** + * Test log with 1 int argument. 
+ */ + @ParameterizedTest(name = "log({0})") + @ValueSource(ints = {2, 3}) + public void log_int_value(Integer v) { + FunctionExpression log = dsl.log(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v), 0.0001) + ); + assertEquals(String.format("log(%s)", v.toString()), log.toString()); + } + + /** + * Test log with 1 long argument. + */ + @ParameterizedTest(name = "log({0})") + @ValueSource(longs = {2L, 3L}) + public void log_int_value(Long v) { + FunctionExpression log = dsl.log(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v), 0.0001) + ); + assertEquals(String.format("log(%s)", v.toString()), log.toString()); + } + + /** + * Test log with 1 float argument. + */ + @ParameterizedTest(name = "log({0})") + @ValueSource(floats = {2F, 3F}) + public void log_float_value(Float v) { + FunctionExpression log = dsl.log(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v), 0.0001) + ); + assertEquals(String.format("log(%s)", v.toString()), log.toString()); + } + + /** + * Test log with 1 double argument. + */ + @ParameterizedTest(name = "log({0})") + @ValueSource(doubles = {2D, 3D}) + public void log_double_value(Double v) { + FunctionExpression log = dsl.log(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v), 0.0001) + ); + assertEquals(String.format("log(%s)", v.toString()), log.toString()); + } + + /** + * Test log with 1 null value argument. + */ + @Test + public void log_null_value() { + FunctionExpression log = dsl.log( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isNull()); + } + + /** + * Test log with 1 missing value argument. 
+ */ + @Test + public void log_missing_value() { + FunctionExpression log = dsl.log( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + } + + /** + * Test log with 2 int arguments. + */ + @ParameterizedTest(name = "log({0}, {1})") + @MethodSource("testLogIntegerArguments") + public void log_two_int_value(Integer v1, Integer v2) { + FunctionExpression log = dsl.log(DSL.literal(v1), DSL.literal(v2)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); + } + + /** + * Test log with 2 long arguments. + */ + @ParameterizedTest(name = "log({0}, {1})") + @MethodSource("testLogLongArguments") + public void log_two_long_value(Long v1, Long v2) { + FunctionExpression log = dsl.log(DSL.literal(v1), DSL.literal(v2)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); + } + + /** + * Test log with 2 float arguments. + */ + @ParameterizedTest(name = "log({0}, {1})") + @MethodSource("testLogFloatArguments") + public void log_two_double_value(Float v1, Float v2) { + FunctionExpression log = dsl.log(DSL.literal(v1), DSL.literal(v2)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); + } + + /** + * Test log with 2 double arguments. 
+ */ + @ParameterizedTest(name = "log({0}, {1})") + @MethodSource("testLogDoubleArguments") + public void log_two_double_value(Double v1, Double v2) { + FunctionExpression log = dsl.log(DSL.literal(v1), DSL.literal(v2)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v2) / Math.log(v1), 0.0001)); + assertEquals(String.format("log(%s, %s)", v1.toString(), v2.toString()), log.toString()); + } + + /** + * Test log with 2 null value arguments. + */ + @Test + public void log_two_null_value() { + FunctionExpression log = dsl.log( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), DSL.literal(2D)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isNull()); + + log = dsl.log(DSL.literal(2D), DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isNull()); + + log = dsl.log( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isNull()); + } + + /** + * Test log with 2 missing value arguments. + */ + @Test + public void log_two_missing_value() { + FunctionExpression log = dsl.log( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), DSL.literal(2D)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + + log = dsl.log(DSL.literal(2D), DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + + log = dsl.log( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + } + + /** + * Test log with null and missing value arguments. 
+ */ + @Test + public void log_null_missing() { + FunctionExpression log = dsl.log( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + + log = dsl.log( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + } + + /** + * Test log10 with int value. + */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(ints = {2, 3}) + public void log10_int_value(Integer v) { + FunctionExpression log = dsl.log10(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log10(v), 0.0001) + ); + assertEquals(String.format("log10(%s)", v.toString()), log.toString()); + } + + /** + * Test log10 with long value. + */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(longs = {2L, 3L}) + public void log10_long_value(Long v) { + FunctionExpression log = dsl.log10(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log10(v), 0.0001) + ); + assertEquals(String.format("log10(%s)", v.toString()), log.toString()); + } + + /** + * Test log10 with float value. + */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(floats = {2F, 3F}) + public void log10_float_value(Float v) { + FunctionExpression log = dsl.log10(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log10(v), 0.0001) + ); + assertEquals(String.format("log10(%s)", v.toString()), log.toString()); + } + + /** + * Test log10 with int value. 
+ */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(doubles = {2D, 3D}) + public void log10_double_value(Double v) { + FunctionExpression log = dsl.log10(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log10(v), 0.0001) + ); + assertEquals(String.format("log10(%s)", v.toString()), log.toString()); + } + + /** + * Test log10 with null value. + */ + @Test + public void log10_null_value() { + FunctionExpression log = dsl.log10( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isNull()); + } + + /** + * Test log10 with missing value. + */ + @Test + public void log10_missing_value() { + FunctionExpression log = dsl.log10( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + } + + /** + * Test log2 with int value. + */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(ints = {2, 3}) + public void log2_int_value(Integer v) { + FunctionExpression log = dsl.log2(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v) / Math.log(2), 0.0001) + ); + assertEquals(String.format("log2(%s)", v.toString()), log.toString()); + } + + /** + * Test log2 with long value. + */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(longs = {2L, 3L}) + public void log2_long_value(Long v) { + FunctionExpression log = dsl.log2(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v) / Math.log(2), 0.0001) + ); + assertEquals(String.format("log2(%s)", v.toString()), log.toString()); + } + + /** + * Test log2 with float value. 
+ */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(floats = {2F, 3F}) + public void log2_float_value(Float v) { + FunctionExpression log = dsl.log2(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v) / Math.log(2), 0.0001) + ); + assertEquals(String.format("log2(%s)", v.toString()), log.toString()); + } + + /** + * Test log2 with double value. + */ + @ParameterizedTest(name = "log10({0})") + @ValueSource(doubles = {2D, 3D}) + public void log2_double_value(Double v) { + FunctionExpression log = dsl.log2(DSL.literal(v)); + assertEquals(log.type(), DOUBLE); + assertThat( + getDoubleValue(log.valueOf(valueEnv())), + closeTo(Math.log(v) / Math.log(2), 0.0001) + ); + assertEquals(String.format("log2(%s)", v.toString()), log.toString()); + } + + /** + * Test log2 with null value. + */ + @Test + public void log2_null_value() { + FunctionExpression log = dsl.log2( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isNull()); + } + + /** + * Test log2 with missing value. + */ + @Test + public void log2_missing_value() { + FunctionExpression log = dsl.log2( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, log.type()); + assertTrue(log.valueOf(valueEnv()).isMissing()); + } + + /** + * Test mod with integer value. + */ + @ParameterizedTest(name = "mod({0}, {1})") + @MethodSource("testLogIntegerArguments") + public void mod_int_value(Integer v1, Integer v2) { + FunctionExpression mod = dsl.mod(DSL.literal(v1), DSL.literal(v2)); + assertThat( + mod.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue(v1 % v2))); + assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString()); + + mod = dsl.mod(DSL.literal(v1), DSL.literal(0)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isNull()); + } + + /** + * Test mod with long value. 
+   */
+  @ParameterizedTest(name = "mod({0}, {1})")
+  @MethodSource("testLogLongArguments")
+  public void mod_long_value(Long v1, Long v2) {
+    FunctionExpression mod = dsl.mod(DSL.literal(v1), DSL.literal(v2));
+    assertThat(
+        mod.valueOf(valueEnv()),
+        allOf(hasType(LONG), hasValue(v1 % v2)));
+    assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString());
+
+    mod = dsl.mod(DSL.literal(v1), DSL.literal(0));
+    assertEquals(LONG, mod.type());
+    assertTrue(mod.valueOf(valueEnv()).isNull());
+  }
+
+  /**
+   * Test mod with float value.
+   */
+  @ParameterizedTest(name = "mod({0}, {1})")
+  @MethodSource("testLogFloatArguments")
+  public void mod_float_value(Float v1, Float v2) {
+    FunctionExpression mod = dsl.mod(DSL.literal(v1), DSL.literal(v2));
+    assertThat(
+        mod.valueOf(valueEnv()),
+        allOf(hasType(FLOAT), hasValue(v1 % v2)));
+    assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString());
+
+    mod = dsl.mod(DSL.literal(v1), DSL.literal(0));
+    assertEquals(FLOAT, mod.type());
+    assertTrue(mod.valueOf(valueEnv()).isNull());
+  }
+
+  /**
+   * Test mod with double value.
+   */
+  @ParameterizedTest(name = "mod({0}, {1})")
+  @MethodSource("testLogDoubleArguments")
+  public void mod_double_value(Double v1, Double v2) {
+    FunctionExpression mod = dsl.mod(DSL.literal(v1), DSL.literal(v2));
+    assertThat(
+        mod.valueOf(valueEnv()),
+        allOf(hasType(DOUBLE), hasValue(v1 % v2)));
+    assertEquals(String.format("mod(%s, %s)", v1, v2), mod.toString());
+
+    mod = dsl.mod(DSL.literal(v1), DSL.literal(0));
+    assertEquals(DOUBLE, mod.type());
+    assertTrue(mod.valueOf(valueEnv()).isNull());
+  }
+
+  /**
+   * Test mod with null value.
+ */ + @Test + public void mod_null_value() { + FunctionExpression mod = dsl.mod(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isNull()); + + mod = dsl.mod(DSL.literal(1), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isNull()); + + mod = dsl.mod( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isNull()); + } + + /** + * Test mod with missing value. + */ + @Test + public void mod_missing_value() { + FunctionExpression mod = + dsl.mod(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isMissing()); + + mod = dsl.mod(DSL.literal(1), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isMissing()); + + mod = dsl.mod( + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isMissing()); + } + + /** + * Test mod with null and missing values. + */ + @Test + public void mod_null_missing() { + FunctionExpression mod = dsl.mod(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isMissing()); + + mod = dsl.mod(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, mod.type()); + assertTrue(mod.valueOf(valueEnv()).isMissing()); + } + + /** + * Test pow/power with integer value. 
+ */ + @ParameterizedTest(name = "pow({0}, {1}") + @MethodSource("testLogIntegerArguments") + public void pow_int_value(Integer v1, Integer v2) { + FunctionExpression pow = dsl.pow(DSL.literal(v1), DSL.literal(v2)); + assertThat( + pow.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + + FunctionExpression power = dsl.power(DSL.literal(v1), DSL.literal(v2)); + assertThat( + power.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + } + + /** + * Test pow/power with long value. + */ + @ParameterizedTest(name = "pow({0}, {1}") + @MethodSource("testLogLongArguments") + public void pow_long_value(Long v1, Long v2) { + FunctionExpression pow = dsl.pow(DSL.literal(v1), DSL.literal(v2)); + assertThat( + pow.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + + FunctionExpression power = dsl.power(DSL.literal(v1), DSL.literal(v2)); + assertThat( + power.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + } + + /** + * Test pow/power with float value. 
+ */ + @ParameterizedTest(name = "pow({0}, {1}") + @MethodSource("testLogFloatArguments") + public void pow_float_value(Float v1, Float v2) { + FunctionExpression pow = dsl.pow(DSL.literal(v1), DSL.literal(v2)); + assertThat( + pow.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + + FunctionExpression power = dsl.power(DSL.literal(v1), DSL.literal(v2)); + assertThat( + power.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + } + + /** + * Test pow/power with double value. + */ + @ParameterizedTest(name = "pow({0}, {1}") + @MethodSource("testLogDoubleArguments") + public void pow_double_value(Double v1, Double v2) { + FunctionExpression pow = dsl.pow(DSL.literal(v1), DSL.literal(v2)); + assertThat( + pow.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + + FunctionExpression power = dsl.power(DSL.literal(v1), DSL.literal(v2)); + assertThat( + power.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.pow(v1, v2)))); + assertEquals(String.format("pow(%s, %s)", v1, v2), pow.toString()); + } + + /** + * Test pow/power with null value. 
+ */ + @Test + public void pow_null_value() { + FunctionExpression pow = dsl.pow(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isNull()); + + dsl.pow(DSL.literal(1), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isNull()); + + dsl.pow( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isNull()); + + FunctionExpression power = + dsl.power(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isNull()); + + power = dsl.power(DSL.literal(1), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isNull()); + + power = dsl.power( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isNull()); + } + + /** + * Test pow/power with missing value. 
+ */ + @Test + public void pow_missing_value() { + FunctionExpression pow = + dsl.pow(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isMissing()); + + dsl.pow(DSL.literal(1), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isMissing()); + + dsl.pow(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isMissing()); + + FunctionExpression power = + dsl.power(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isMissing()); + + power = dsl.power(DSL.literal(1), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isMissing()); + + power = dsl.power(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isMissing()); + } + + /** + * Test pow/power with null and missing values. 
+ */ + @Test + public void pow_null_missing() { + FunctionExpression pow = dsl.pow( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isMissing()); + + pow = dsl.pow( + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, pow.type()); + assertTrue(pow.valueOf(valueEnv()).isMissing()); + + FunctionExpression power = dsl.power( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isMissing()); + + power = dsl.power( + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, power.type()); + assertTrue(power.valueOf(valueEnv()).isMissing()); + } + + /** + * Test round with integer value. + */ + @ParameterizedTest(name = "round({0}") + @ValueSource(ints = {21, -21}) + public void round_int_value(Integer value) { + FunctionExpression round = dsl.round(DSL.literal(value)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue((long) Math.round(value)))); + assertEquals(String.format("round(%s)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue( + new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).longValue()))); + assertEquals(String.format("round(%s, 1)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(-1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue( + new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).longValue()))); + assertEquals(String.format("round(%s, -1)", value), round.toString()); + } + + /** + * Test round with long value. 
+ */ + @ParameterizedTest(name = "round({0}") + @ValueSource(longs = {21L, -21L}) + public void round_long_value(Long value) { + FunctionExpression round = dsl.round(DSL.literal(value)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue((long) Math.round(value)))); + assertEquals(String.format("round(%s)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue( + new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).longValue()))); + assertEquals(String.format("round(%s, 1)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(-1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(LONG), hasValue( + new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).longValue()))); + assertEquals(String.format("round(%s, -1)", value), round.toString()); + } + + /** + * Test round with float value. + */ + @ParameterizedTest(name = "round({0}") + @ValueSource(floats = {21F, -21F}) + public void round_float_value(Float value) { + FunctionExpression round = dsl.round(DSL.literal(value)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue((double) Math.round(value)))); + assertEquals(String.format("round(%s)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue( + new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).doubleValue()))); + assertEquals(String.format("round(%s, 1)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(-1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue( + new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).doubleValue()))); + assertEquals(String.format("round(%s, -1)", value), round.toString()); + } + + /** + * Test round with double value. 
+ */ + @ParameterizedTest(name = "round({0}") + @ValueSource(doubles = {21D, -21D}) + public void round_double_value(Double value) { + FunctionExpression round = dsl.round(DSL.literal(value)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue((double) Math.round(value)))); + assertEquals(String.format("round(%s)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue( + new BigDecimal(value).setScale(1, RoundingMode.HALF_UP).doubleValue()))); + assertEquals(String.format("round(%s, 1)", value), round.toString()); + + round = dsl.round(DSL.literal(value), DSL.literal(-1)); + assertThat( + round.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue( + new BigDecimal(value).setScale(-1, RoundingMode.HALF_UP).doubleValue()))); + assertEquals(String.format("round(%s, -1)", value), round.toString()); + } + + /** + * Test round with null value. + */ + @Test + public void round_null_value() { + FunctionExpression round = dsl.round(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isNull()); + + round = dsl.round(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isNull()); + + round = dsl.round(DSL.literal(1), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isNull()); + } + + /** + * Test round with null value. 
+ */ + @Test + public void round_missing_value() { + FunctionExpression round = dsl.round(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isMissing()); + + round = dsl.round(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isMissing()); + + round = dsl.round(DSL.literal(1), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isMissing()); + } + + /** + * Test round with null and missing values. + */ + @Test + public void round_null_missing() { + FunctionExpression round = dsl.round( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isMissing()); + + round = dsl.round( + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(LONG, round.type()); + assertTrue(round.valueOf(valueEnv()).isMissing()); + } + + /** + * Test sign with integer value. + */ + @ParameterizedTest(name = "sign({0})") + @ValueSource(ints = {2, -2}) + public void sign_int_value(Integer value) { + FunctionExpression sign = dsl.sign(DSL.literal(value)); + assertThat( + sign.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + assertEquals(String.format("sign(%s)", value), sign.toString()); + } + + /** + * Test sign with long value. + */ + @ParameterizedTest(name = "sign({0})") + @ValueSource(longs = {2L, -2L}) + public void sign_long_value(Long value) { + FunctionExpression sign = dsl.sign(DSL.literal(value)); + assertThat( + sign.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + assertEquals(String.format("sign(%s)", value), sign.toString()); + } + + /** + * Test sign with float value. 
+ */ + @ParameterizedTest(name = "sign({0})") + @ValueSource(floats = {2F, -2F}) + public void sign_float_value(Float value) { + FunctionExpression sign = dsl.sign(DSL.literal(value)); + assertThat( + sign.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + assertEquals(String.format("sign(%s)", value), sign.toString()); + } + + /** + * Test sign with double value. + */ + @ParameterizedTest(name = "sign({0})") + @ValueSource(doubles = {2, -2}) + public void sign_double_value(Double value) { + FunctionExpression sign = dsl.sign(DSL.literal(value)); + assertThat( + sign.valueOf(valueEnv()), + allOf(hasType(INTEGER), hasValue((int) Math.signum(value)))); + assertEquals(String.format("sign(%s)", value), sign.toString()); + } + + /** + * Test sign with null value. + */ + @Test + public void sign_null_value() { + FunctionExpression sign = dsl.sign(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, sign.type()); + assertTrue(sign.valueOf(valueEnv()).isNull()); + } + + /** + * Test sign with missing value. + */ + @Test + public void sign_missing_value() { + FunctionExpression sign = dsl.sign(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(INTEGER, sign.type()); + assertTrue(sign.valueOf(valueEnv()).isMissing()); + } + + /** + * Test sqrt with int value. + */ + @ParameterizedTest(name = "sqrt({0})") + @ValueSource(ints = {1, 2}) + public void sqrt_int_value(Integer value) { + FunctionExpression sqrt = dsl.sqrt(DSL.literal(value)); + assertThat(sqrt.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.sqrt(value)))); + assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); + } + + /** + * Test sqrt with long value. 
+ */ + @ParameterizedTest(name = "sqrt({0})") + @ValueSource(longs = {1L, 2L}) + public void sqrt_long_value(Long value) { + FunctionExpression sqrt = dsl.sqrt(DSL.literal(value)); + assertThat(sqrt.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.sqrt(value)))); + assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); + } + + /** + * Test sqrt with float value. + */ + @ParameterizedTest(name = "sqrt({0})") + @ValueSource(floats = {1F, 2F}) + public void sqrt_float_value(Float value) { + FunctionExpression sqrt = dsl.sqrt(DSL.literal(value)); + assertThat(sqrt.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.sqrt(value)))); + assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); + } + + /** + * Test sqrt with double value. + */ + @ParameterizedTest(name = "sqrt({0})") + @ValueSource(doubles = {1D, 2D}) + public void sqrt_double_value(Double value) { + FunctionExpression sqrt = dsl.sqrt(DSL.literal(value)); + assertThat(sqrt.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.sqrt(value)))); + assertEquals(String.format("sqrt(%s)", value), sqrt.toString()); + } + + /** + * Test sqrt with negative value. + */ + @ParameterizedTest(name = "sqrt({0})") + @ValueSource(doubles = {-1D, -2D}) + public void sqrt_negative_value(Double value) { + FunctionExpression sqrt = dsl.sqrt(DSL.literal(value)); + assertEquals(DOUBLE, sqrt.type()); + assertTrue(sqrt.valueOf(valueEnv()).isNull()); + } + + /** + * Test sqrt with null value. + */ + @Test + public void sqrt_null_value() { + FunctionExpression sqrt = dsl.sqrt(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, sqrt.type()); + assertTrue(sqrt.valueOf(valueEnv()).isNull()); + } + + /** + * Test sqrt with missing value. 
+ */ + @Test + public void sqrt_missing_value() { + FunctionExpression sqrt = dsl.sqrt(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(DOUBLE, sqrt.type()); + assertTrue(sqrt.valueOf(valueEnv()).isMissing()); + } + + /** + * Test truncate with integer value. + */ + @ParameterizedTest(name = "truncate({0}, {1})") + @ValueSource(ints = {2, -2}) + public void truncate_int_value(Integer value) { + FunctionExpression truncate = dsl.truncate(DSL.literal(value), DSL.literal(1)); + assertThat( + truncate.valueOf(valueEnv()), allOf(hasType(LONG), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.DOWN).longValue()))); + assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); + } + + /** + * Test truncate with long value. + */ + @ParameterizedTest(name = "truncate({0}, {1})") + @ValueSource(longs = {2L, -2L}) + public void truncate_long_value(Long value) { + FunctionExpression truncate = dsl.truncate(DSL.literal(value), DSL.literal(1)); + assertThat( + truncate.valueOf(valueEnv()), allOf(hasType(LONG), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.DOWN).longValue()))); + assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); + } + + /** + * Test truncate with float value. + */ + @ParameterizedTest(name = "truncate({0}, {1})") + @ValueSource(floats = {2F, -2F}) + public void truncate_float_value(Float value) { + FunctionExpression truncate = dsl.truncate(DSL.literal(value), DSL.literal(1)); + assertThat( + truncate.valueOf(valueEnv()), allOf(hasType(DOUBLE), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.DOWN).doubleValue()))); + assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); + } + + /** + * Test truncate with double value. 
+ */ + @ParameterizedTest(name = "truncate({0}, {1})") + @ValueSource(doubles = {2D, -2D}) + public void truncate_double_value(Double value) { + FunctionExpression truncate = dsl.truncate(DSL.literal(value), DSL.literal(1)); + assertThat( + truncate.valueOf(valueEnv()), allOf(hasType(DOUBLE), + hasValue(new BigDecimal(value).setScale(1, RoundingMode.DOWN).doubleValue()))); + assertEquals(String.format("truncate(%s, 1)", value), truncate.toString()); + } + + /** + * Test truncate with null value. + */ + @Test + public void truncate_null_value() { + FunctionExpression truncate = + dsl.truncate(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isNull()); + + truncate = dsl.truncate(DSL.literal(1), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isNull()); + + truncate = dsl.truncate( + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isNull()); + } + + /** + * Test truncate with missing value. + */ + @Test + public void truncate_missing_value() { + FunctionExpression truncate = + dsl.truncate(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), DSL.literal(1)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isMissing()); + + truncate = dsl.truncate(DSL.literal(1), DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isMissing()); + + truncate = dsl.truncate( + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isMissing()); + } + + /** + * Test truncate with null and missing values. 
+ */ + @Test + public void truncate_null_missing() { + FunctionExpression truncate = dsl.truncate(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isMissing()); + + truncate = dsl.truncate(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER), + DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(LONG, truncate.type()); + assertTrue(truncate.valueOf(valueEnv()).isMissing()); + } + + /** + * Test constant pi. + */ + @Test + public void test_pi() { + FunctionExpression pi = dsl.pi(); + assertThat(pi.valueOf(valueEnv()), allOf(hasType(DOUBLE), hasValue(Math.PI))); + } + + /** + * Test rand with no argument. + */ + @Test + public void rand_no_arg() { + FunctionExpression rand = dsl.rand(); + assertEquals(FLOAT, rand.type()); + assertTrue( + getFloatValue(rand.valueOf(valueEnv())) >= 0 + && getFloatValue(rand.valueOf(valueEnv())) < 1); + assertEquals("rand()", rand.toString()); + } + + /** + * Test rand with integer value. + */ + @ParameterizedTest(name = "rand({0})") + @ValueSource(ints = {2, 3}) + public void rand_int_value(Integer n) { + FunctionExpression rand = dsl.rand(DSL.literal(n)); + assertEquals(FLOAT, rand.type()); + assertTrue( + getFloatValue(rand.valueOf(valueEnv())) >= 0 + && getFloatValue(rand.valueOf(valueEnv())) < 1); + assertEquals(getFloatValue(rand.valueOf(valueEnv())), new Random(n).nextFloat()); + assertEquals(String.format("rand(%s)", n), rand.toString()); + } + + @Test + public void rand_null_value() { + FunctionExpression rand = dsl.rand(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)); + assertEquals(FLOAT, rand.type()); + assertTrue(rand.valueOf(valueEnv()).isNull()); + } + + /** + * Test acos with integer, long, float, double values. 
+ */ + @ParameterizedTest(name = "acos({0})") + @MethodSource("trigonometricArguments") + public void test_acos(Number value) { + FunctionExpression acos = dsl.acos(DSL.literal(value)); + assertThat( + acos.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.acos(value.doubleValue())))); + assertEquals(String.format("acos(%s)", value), acos.toString()); + } + + /** + * Test acos with illegal values. + */ + @ParameterizedTest(name = "acos({0})") + @ValueSource(doubles = {2D, -2D}) + public void acos_with_illegal_value(Number value) { + FunctionExpression acos = dsl.acos(DSL.literal(value)); + assertEquals(DOUBLE, acos.type()); + assertTrue(acos.valueOf(valueEnv()).isNull()); + } + + /** + * Test acos with null value. + */ + @Test + public void acos_null_value() { + FunctionExpression acos = dsl.acos(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, acos.type()); + assertTrue(acos.valueOf(valueEnv()).isNull()); + } + + /** + * Test acos with missing value. + */ + @Test + public void acos_missing_value() { + FunctionExpression acos = dsl.acos(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, acos.type()); + assertTrue(acos.valueOf(valueEnv()).isMissing()); + } + + /** + * Test asin with integer, long, float, double values. + */ + @ParameterizedTest(name = "asin({0})") + @MethodSource("trigonometricArguments") + public void test_asin(Number value) { + FunctionExpression asin = dsl.asin(DSL.literal(value)); + assertThat( + asin.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.asin(value.doubleValue())))); + assertEquals(String.format("asin(%s)", value), asin.toString()); + } + + /** + * Test asin with illegal value. 
+ */ + @ParameterizedTest(name = "asin({0})") + @ValueSource(doubles = {2D, -2D}) + public void asin_with_illegal_value(Number value) { + FunctionExpression asin = dsl.asin(DSL.literal(value)); + assertEquals(DOUBLE, asin.type()); + assertTrue(asin.valueOf(valueEnv()).isNull()); + } + + /** + * Test asin with null value. + */ + @Test + public void asin_null_value() { + FunctionExpression asin = dsl.asin(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, asin.type()); + assertTrue(asin.valueOf(valueEnv()).isNull()); + } + + /** + * Test asin with missing value. + */ + @Test + public void asin_missing_value() { + FunctionExpression asin = dsl.asin(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, asin.type()); + assertTrue(asin.valueOf(valueEnv()).isMissing()); + } + + /** + * Test atan with one argument integer, long, float, double values. + */ + @ParameterizedTest(name = "atan({0})") + @MethodSource("trigonometricArguments") + public void atan_one_arg(Number value) { + FunctionExpression atan = dsl.atan(DSL.literal(value)); + assertThat( + atan.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.atan(value.doubleValue())))); + assertEquals(String.format("atan(%s)", value), atan.toString()); + } + + /** + * Test atan with two arguments of integer, long, float, double values. + */ + @ParameterizedTest(name = "atan({0}, {1})") + @MethodSource("trigonometricDoubleArguments") + public void atan_two_args(Number v1, Number v2) { + FunctionExpression atan = + dsl.atan(DSL.literal(v1), DSL.literal(v2)); + assertThat( + atan.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.atan2(v1.doubleValue(), v2.doubleValue())))); + assertEquals(String.format("atan(%s, %s)", v1, v2), atan.toString()); + } + + /** + * Test atan with null value. 
+ */ + @Test + public void atan_null_value() { + FunctionExpression atan = dsl.atan(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isNull()); + + atan = dsl.atan(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), DSL.literal(1)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isNull()); + + atan = dsl.atan(DSL.literal(1), DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isNull()); + + atan = dsl.atan(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isNull()); + } + + /** + * Test atan with missing value. + */ + @Test + public void atan_missing_value() { + FunctionExpression atan = dsl.atan(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isMissing()); + + atan = dsl.atan(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), DSL.literal(1)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isMissing()); + + atan = dsl.atan(DSL.literal(1), DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isMissing()); + + atan = dsl.atan(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isMissing()); + } + + /** + * Test atan with missing value. 
+ */ + @Test + public void atan_null_missing() { + FunctionExpression atan = dsl.atan( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isMissing()); + + atan = dsl.atan(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan.type()); + assertTrue(atan.valueOf(valueEnv()).isMissing()); + } + + /** + * Test atan2 with integer, long, float, double values. + */ + @ParameterizedTest(name = "atan2({0}, {1})") + @MethodSource("trigonometricDoubleArguments") + public void test_atan2(Number v1, Number v2) { + FunctionExpression atan2 = dsl.atan2(DSL.literal(v1), DSL.literal(v2)); + assertThat( + atan2.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.atan2(v1.doubleValue(), v2.doubleValue())))); + assertEquals(String.format("atan2(%s, %s)", v1, v2), atan2.toString()); + } + + /** + * Test atan2 with null value. + */ + @Test + public void atan2_null_value() { + FunctionExpression atan2 = dsl.atan2( + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), DSL.literal(1)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isNull()); + + atan2 = dsl.atan2(DSL.literal(1), DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isNull()); + + atan2 = dsl.atan2(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isNull()); + } + + /** + * Test atan2 with missing value. 
+ */ + @Test + public void atan2_missing_value() { + FunctionExpression atan2 = dsl.atan2( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), DSL.literal(1)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isMissing()); + + atan2 = dsl.atan2(DSL.literal(1), DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isMissing()); + + atan2 = dsl.atan2(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isMissing()); + } + + /** + * Test atan2 with missing value. + */ + @Test + public void atan2_null_missing() { + FunctionExpression atan2 = dsl.atan2( + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isMissing()); + + atan2 = dsl.atan2(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE), + DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, atan2.type()); + assertTrue(atan2.valueOf(valueEnv()).isMissing()); + } + + /** + * Test cos with integer, long, float, double values. + */ + @ParameterizedTest(name = "cos({0})") + @MethodSource("trigonometricArguments") + public void test_cos(Number value) { + FunctionExpression cos = dsl.cos(DSL.literal(value)); + assertThat( + cos.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.cos(value.doubleValue())))); + assertEquals(String.format("cos(%s)", value), cos.toString()); + } + + /** + * Test cos with null value. + */ + @Test + public void cos_null_value() { + FunctionExpression cos = dsl.cos(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, cos.type()); + assertTrue(cos.valueOf(valueEnv()).isNull()); + } + + /** + * Test cos with missing value. 
+ */ + @Test + public void cos_missing_value() { + FunctionExpression cos = dsl.cos(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, cos.type()); + assertTrue(cos.valueOf(valueEnv()).isMissing()); + } + + /** + * Test cot with integer, long, float, double values. + */ + @ParameterizedTest(name = "cot({0})") + @MethodSource("trigonometricArguments") + public void test_cot(Number value) { + FunctionExpression cot = dsl.cot(DSL.literal(value)); + assertThat( + cot.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(1 / Math.tan(value.doubleValue())))); + assertEquals(String.format("cot(%s)", value), cot.toString()); + } + + /** + * Test cot with out-of-range value 0. + */ + @ParameterizedTest(name = "cot({0})") + @ValueSource(doubles = {0}) + public void cot_with_zero(Number value) { + FunctionExpression cot = dsl.cot(DSL.literal(value)); + assertThrows( + ArithmeticException.class, () -> cot.valueOf(valueEnv()), + String.format("Out of range value for cot(%s)", value)); + } + + /** + * Test cot with null value. + */ + @Test + public void cot_null_value() { + FunctionExpression cot = dsl.cot(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, cot.type()); + assertTrue(cot.valueOf(valueEnv()).isNull()); + } + + /** + * Test cot with missing value. + */ + @Test + public void cot_missing_value() { + FunctionExpression cot = dsl.cot(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, cot.type()); + assertTrue(cot.valueOf(valueEnv()).isMissing()); + } + + /** + * Test degrees with integer, long, float, double values. 
+ */ + @ParameterizedTest(name = "degrees({0})") + @MethodSource("trigonometricArguments") + public void test_degrees(Number value) { + FunctionExpression degrees = dsl.degrees(DSL.literal(value)); + assertThat( + degrees.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.toDegrees(value.doubleValue())))); + assertEquals(String.format("degrees(%s)", value), degrees.toString()); + } + + /** + * Test degrees with null value. + */ + @Test + public void degrees_null_value() { + FunctionExpression degrees = dsl.degrees(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, degrees.type()); + assertTrue(degrees.valueOf(valueEnv()).isNull()); + } + + /** + * Test degrees with missing value. + */ + @Test + public void degrees_missing_value() { + FunctionExpression degrees = dsl.degrees(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, degrees.type()); + assertTrue(degrees.valueOf(valueEnv()).isMissing()); + } + + /** + * Test radians with integer, long, float, double values. + */ + @ParameterizedTest(name = "radians({0})") + @MethodSource("trigonometricArguments") + public void test_radians(Number value) { + FunctionExpression radians = dsl.radians(DSL.literal(value)); + assertThat( + radians.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.toRadians(value.doubleValue())))); + assertEquals(String.format("radians(%s)", value), radians.toString()); + } + + /** + * Test radians with null value. + */ + @Test + public void radians_null_value() { + FunctionExpression radians = dsl.radians(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, radians.type()); + assertTrue(radians.valueOf(valueEnv()).isNull()); + } + + /** + * Test radians with missing value. 
+ */ + @Test + public void radians_missing_value() { + FunctionExpression radians = dsl.radians(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, radians.type()); + assertTrue(radians.valueOf(valueEnv()).isMissing()); + } + + /** + * Test sin with integer, long, float, double values. + */ + @ParameterizedTest(name = "sin({0})") + @MethodSource("trigonometricArguments") + public void test_sin(Number value) { + FunctionExpression sin = dsl.sin(DSL.literal(value)); + assertThat( + sin.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.sin(value.doubleValue())))); + assertEquals(String.format("sin(%s)", value), sin.toString()); + } + + /** + * Test sin with null value. + */ + @Test + public void sin_null_value() { + FunctionExpression sin = dsl.sin(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, sin.type()); + assertTrue(sin.valueOf(valueEnv()).isNull()); + } + + /** + * Test sin with missing value. + */ + @Test + public void sin_missing_value() { + FunctionExpression sin = dsl.sin(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, sin.type()); + assertTrue(sin.valueOf(valueEnv()).isMissing()); + } + + /** + * Test tan with integer, long, float, double values. + */ + @ParameterizedTest(name = "tan({0})") + @MethodSource("trigonometricArguments") + public void test_tan(Number value) { + FunctionExpression tan = dsl.tan(DSL.literal(value)); + assertThat( + tan.valueOf(valueEnv()), + allOf(hasType(DOUBLE), hasValue(Math.tan(value.doubleValue())))); + assertEquals(String.format("tan(%s)", value), tan.toString()); + } + + /** + * Test tan with null value. + */ + @Test + public void tan_null_value() { + FunctionExpression tan = dsl.tan(DSL.ref(DOUBLE_TYPE_NULL_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, tan.type()); + assertTrue(tan.valueOf(valueEnv()).isNull()); + } + + /** + * Test tan with missing value. 
+ */ + @Test + public void tan_missing_value() { + FunctionExpression tan = dsl.tan(DSL.ref(DOUBLE_TYPE_MISSING_VALUE_FIELD, DOUBLE)); + assertEquals(DOUBLE, tan.type()); + assertTrue(tan.valueOf(valueEnv()).isMissing()); + } +} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunctionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunctionTest.java index fca5ccdbd8..e69de29bb2 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunctionTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/arthmetic/UnaryFunctionTest.java @@ -1,103 +0,0 @@ -/* - * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- */ - -package com.amazon.opendistroforelasticsearch.sql.expression.operator.arthmetic; - -import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.INT_TYPE_MISSING_VALUE_FIELD; -import static com.amazon.opendistroforelasticsearch.sql.config.TestConfig.INT_TYPE_NULL_VALUE_FIELD; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; -import static com.amazon.opendistroforelasticsearch.sql.utils.MatcherUtils.hasType; -import static com.amazon.opendistroforelasticsearch.sql.utils.MatcherUtils.hasValue; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.allOf; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import com.amazon.opendistroforelasticsearch.sql.expression.DSL; -import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionTestBase; -import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; -import org.junit.jupiter.api.DisplayNameGeneration; -import org.junit.jupiter.api.DisplayNameGenerator; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) -public class UnaryFunctionTest extends ExpressionTestBase { - - /** - * Test abs with integer value. - */ - @ParameterizedTest(name = "abs({0})") - @ValueSource(ints = {-2, 2}) - public void abs_int_value(Integer value) { - FunctionExpression abs = dsl.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(INTEGER), hasValue(Math.abs(value)))); - assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); - } - - /** - * Test abs with long value. 
- */ - @ParameterizedTest(name = "abs({0})") - @ValueSource(longs = {-2L, 2L}) - public void abs_long_value(Long value) { - FunctionExpression abs = dsl.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(ExprCoreType.LONG), hasValue(Math.abs(value)))); - assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); - } - - /** - * Test abs with float value. - */ - @ParameterizedTest(name = "abs({0})") - @ValueSource(floats = {-2f, 2f}) - public void abs_float_value(Float value) { - FunctionExpression abs = dsl.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(ExprCoreType.FLOAT), hasValue(Math.abs(value)))); - assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); - } - - /** - * Test abs with double value. - */ - @ParameterizedTest(name = "abs({0})") - @ValueSource(doubles = {-2L, 2L}) - public void abs_double_value(Double value) { - FunctionExpression abs = dsl.abs(DSL.literal(value)); - assertThat( - abs.valueOf(valueEnv()), - allOf(hasType(ExprCoreType.DOUBLE), hasValue(Math.abs(value)))); - assertEquals(String.format("abs(%s)", value.toString()), abs.toString()); - } - - @Test - public void abs_null_value() { - assertTrue(dsl.abs(DSL.ref(INT_TYPE_NULL_VALUE_FIELD, INTEGER)).valueOf(valueEnv()).isNull()); - } - - @Test - public void abs_missing_value() { - assertTrue( - dsl.abs(DSL.ref(INT_TYPE_MISSING_VALUE_FIELD, INTEGER)).valueOf(valueEnv()).isMissing()); - } -} diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java index 8da8967f30..5194d70278 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java +++ 
b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/BinaryPredicateOperatorTest.java @@ -30,19 +30,27 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; + import static com.amazon.opendistroforelasticsearch.sql.utils.ComparisonUtil.compare; import static com.amazon.opendistroforelasticsearch.sql.utils.OperatorUtils.matches; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionTestBase; import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; import java.util.Arrays; +import java.util.Base64; import java.util.List; import java.util.stream.Stream; import org.junit.jupiter.api.Test; @@ -719,4 +727,38 @@ public void test_null_like_missing() { assertEquals(BOOLEAN, like.type()); assertEquals(LITERAL_MISSING, like.valueOf(valueEnv())); } + + /** + * Todo. remove this test cases after script serilization implemented. 
+ */ + @Test + public void serializationTest() throws Exception { + Expression expression = dsl.equal(DSL.literal("v1"), DSL.literal("v2")); + // serialization + ByteArrayOutputStream output = new ByteArrayOutputStream(); + ObjectOutputStream objectOutput = new ObjectOutputStream(output); + objectOutput.writeObject(expression); + objectOutput.flush(); + String source = Base64.getEncoder().encodeToString(output.toByteArray()); + + // deserialization + ByteArrayInputStream input = new ByteArrayInputStream(Base64.getDecoder().decode(source)); + ObjectInputStream objectInput = new ObjectInputStream(input); + Expression e = (Expression) objectInput.readObject(); + ExprValue exprValue = e.valueOf(valueEnv()); + + assertEquals(LITERAL_FALSE, exprValue); + } + + @Test + public void compareNumberValueWithDifferentType() { + FunctionExpression equal = dsl.equal(DSL.literal(1), DSL.literal(1L)); + assertTrue(equal.valueOf(valueEnv()).booleanValue()); + } + + @Test + public void compare_int_long() { + FunctionExpression equal = dsl.equal(DSL.literal(1), DSL.literal(1L)); + assertTrue(equal.valueOf(valueEnv()).booleanValue()); + } } \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java index ce0495c97b..9931304c8e 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/planner/DefaultImplementorTest.java @@ -62,7 +62,7 @@ public void visitShouldReturnDefaultPhysicalOperator() { ReferenceExpression include = ref("age", INTEGER); ReferenceExpression exclude = ref("name", STRING); ReferenceExpression dedupeField = ref("name", STRING); - Expression filterExpr = literal(ExprBooleanValue.ofTrue()); + Expression filterExpr = literal(ExprBooleanValue.of(true)); List groupByExprs = 
Arrays.asList(ref("age", INTEGER)); List aggregators = Arrays.asList(new AvgAggregator(groupByExprs, ExprCoreType.DOUBLE)); diff --git a/docs/attributions.md b/docs/attributions.md index 9b8675f83e..b157ef1c89 100644 --- a/docs/attributions.md +++ b/docs/attributions.md @@ -22,7 +22,7 @@ Apart from the problems we identified earlier, we made significant improvement i 1. *Integration Test*: We migrated all integrate tests to standard Elasticsearch IT framework which spins up in-memory cluster for testing. Now all test cases treat plugin code as blackbox and verify functionality from externally. 2. *New JDBC Driver*: We developed our own JDBC driver without any dependency on Elasticsearch proprietary code. - [sql-jdbc](https://github.com/opendistro-for-elasticsearch/sql-jdbc) + [sql-jdbc](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-jdbc) 3. *Better Hash JOIN*: OpenDistro SQL launched with Block Hash Join implementation with circuit break mechanism to protect your Elasticsearch memory. Performance testing showed our implementation is 1.5 ~ 2x better than old hash join in terms of throughput and latency and much lower error rate under heavy pressure. 4. *Query Planner*: Logical and physical planner was added to support JOIN query in efficient and extendible way. 5. *PartiQL Compatibility*: we are partially compatible with PartiQL specification which allows for query involved in nested JSON documents. 
diff --git a/docs/category.json b/docs/category.json index d889ba4a8d..595121a7d4 100644 --- a/docs/category.json +++ b/docs/category.json @@ -15,6 +15,9 @@ "experiment/ppl/cmd/where.rst" ], "sql_cli": [ - "user/dql/expressions.rst" + "user/dql/expressions.rst", + "user/general/identifiers.rst", + "user/dql/functions.rst", + "user/beyond/partiql.rst" ] } \ No newline at end of file diff --git a/docs/dev/Doctest.md b/docs/dev/Doctest.md index 18750834fe..2690901177 100644 --- a/docs/dev/Doctest.md +++ b/docs/dev/Doctest.md @@ -145,7 +145,7 @@ Doctest is relying on the console/command line to run code examples in documenta * https://github.com/crate/crate/blob/master/docs/general/dql/selects.rst -Similar to CarateDB using it’s CLI “crash”, we can make use of our own [SQL-CLI](https://github.com/opendistro-for-elasticsearch/sql-cli) +Similar to CrateDB using it’s CLI “crash”, we can make use of our own [SQL-CLI](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-cli) To support PPL, we need to add PPL support to SQL-CLI. Since PPL and SQL expose similar http endpoint for query and share similar response format. The update won’t be much of work. @@ -270,4 +270,4 @@ FAILURE: Build failed with an exception. ### 2.3.2 generate report * Python tests can’t be integrated to Jacoco test reporting -* TODO: need to figure out a better solution \ No newline at end of file +* TODO: need to figure out a better solution diff --git a/docs/dev/Testing.md b/docs/dev/Testing.md index 260adb1141..2c1df6a928 100644 --- a/docs/dev/Testing.md +++ b/docs/dev/Testing.md @@ -142,6 +142,12 @@ The workflow of generating test result is: ![The Workflow of Comparison Test](img/the-workflow-of-comparison-test.png) + 1. For the schema, name, type as well as their order requires to be the same. + 2. For the data rows, only data in each row matters with row order ignored. + 3. Report success if any other database result can match. + 4. 
Report error if all other databases throw exception. + 5. Report failure otherwise if mismatch and exception mixed. + ### 3.6 Visualization TODO diff --git a/docs/dev/img/the-workflow-of-comparison-test.png b/docs/dev/img/the-workflow-of-comparison-test.png index df3e839139..e38e4442ab 100644 Binary files a/docs/dev/img/the-workflow-of-comparison-test.png and b/docs/dev/img/the-workflow-of-comparison-test.png differ diff --git a/docs/experiment/ppl/cmd/search.rst b/docs/experiment/ppl/cmd/search.rst index d1a943f24f..eacc6eb966 100644 --- a/docs/experiment/ppl/cmd/search.rst +++ b/docs/experiment/ppl/cmd/search.rst @@ -32,14 +32,14 @@ PPL query:: od> source=accounts; fetched rows / total rows = 4/4 - +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ - | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | - |------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------| - | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | amberduke@pyrami.com | Duke | - | 6 | Hattie | 671 Bristol Street | 5686 | M | Dante | Netagy | TN | 36 | hattiebond@netagy.com | Bond | - | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | - | 18 | Dale | 467 Hutchinson Court | 4180 | M | Orick | null | MD | 33 | daleadams@boink.com | Adams | - +------------------+-------------+----------------------+-----------+----------+--------+------------+---------+-------+-----------------------+------------+ + +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | + 
|------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------| + | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | + | 6 | 5686 | Hattie | Bond | 36 | M | 671 Bristol Street | Netagy | hattiebond@netagy.com | Dante | TN | + | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | + | 18 | 4180 | Dale | Adams | 33 | M | 467 Hutchinson Court | null | daleadams@boink.com | Orick | MD | + +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ Example 2: Fetch data with condition ==================================== @@ -50,10 +50,10 @@ PPL query:: od> source=accounts account_number=1 or gender="F"; fetched rows / total rows = 2/2 - +------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------+ - | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | - |------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------| - | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | amberduke@pyrami.com | Duke | - | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | - +------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------+ + +------------------+-----------+-------------+------------+-------+----------+--------------------+------------+----------------------+--------+---------+ + | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | + 
|------------------+-----------+-------------+------------+-------+----------+--------------------+------------+----------------------+--------+---------| + | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | + | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | + +------------------+-----------+-------------+------------+-------+----------+--------------------+------------+----------------------+--------+---------+ diff --git a/docs/experiment/ppl/cmd/stats.rst b/docs/experiment/ppl/cmd/stats.rst index 2871951949..5d0ce101c6 100644 --- a/docs/experiment/ppl/cmd/stats.rst +++ b/docs/experiment/ppl/cmd/stats.rst @@ -51,8 +51,8 @@ PPL query:: +----------+--------------------+ | gender | avg(age) | |----------+--------------------| - | M | 33.666666666666664 | | F | 28 | + | M | 33.666666666666664 | +----------+--------------------+ @@ -68,7 +68,7 @@ PPL query:: +----------+--------------------+------------+ | gender | avg(age) | sum(age) | |----------+--------------------+------------| - | M | 33.666666666666664 | 101 | | F | 28 | 28 | + | M | 33.666666666666664 | 101 | +----------+--------------------+------------+ diff --git a/docs/experiment/ppl/cmd/where.rst b/docs/experiment/ppl/cmd/where.rst index 340f20e834..a74a3dcb1c 100644 --- a/docs/experiment/ppl/cmd/where.rst +++ b/docs/experiment/ppl/cmd/where.rst @@ -27,12 +27,12 @@ The example show fetch all the document from accounts index with . 
PPL query:: - od> source=accounts | where account_number=1 or gender="F"; + od> source=accounts | where account_number=1 or gender="F" | fields account_number, gender; fetched rows / total rows = 2/2 - +------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------+ - | account_number | firstname | address | balance | gender | city | employer | state | age | email | lastname | - |------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------| - | 1 | Amber | 880 Holmes Lane | 39225 | M | Brogan | Pyrami | IL | 32 | amberduke@pyrami.com | Duke | - | 13 | Nanette | 789 Madison Street | 32838 | F | Nogal | Quility | VA | 28 | null | Bates | - +------------------+-------------+--------------------+-----------+----------+--------+------------+---------+-------+----------------------+------------+ + +------------------+----------+ + | account_number | gender | + |------------------+----------| + | 1 | M | + | 13 | F | + +------------------+----------+ diff --git a/docs/user/beyond/partiql.rst b/docs/user/beyond/partiql.rst index 611ae72c54..6da74ba390 100644 --- a/docs/user/beyond/partiql.rst +++ b/docs/user/beyond/partiql.rst @@ -12,7 +12,9 @@ PartiQL (JSON) Support Introduction ============ -PartiQL is a SQL-compatible query language that makes it easy and efficient to query semi-structured and nested data regardless of data format. For now our implementation is only partially compatible with PartiQL specification and more support will be provided in future. +In Elasticsearch, there are two types of JSON field in Elasticsarch (called "properties"): ``object`` and ``nested``. An object field can have inner field(s) which could be a simple one or another object field recursively. A nested field is a special version of object type that allows inner field be queried independently. 
+ +To support queries for both types, we follow the query language syntax defined in PartiQL specification. PartiQL is a SQL-compatible query language that makes it easy and efficient to query semi-structured and nested data regardless of data format. For now our implementation is only partially compatible with PartiQL specification and more support will be provided in future. Test Data ========= @@ -20,8 +22,50 @@ Test Data Description ----------- +The test index ``people`` is to demonstrate our support for queries with deep nested object fields. The test index ``employees_nested`` used by all examples in this document is very similar to the one used in official PartiQL documentation. +Example: People +--------------- + +There are three fields in test index ``people``: 1) deep nested object field ``city``; 2) object field of array value ``account``; 3) nested field ``projects``:: + + { + "mappings": { + "properties": { + "city": { + "properties": { + "name": { + "type": "keyword" + }, + "location": { + "properties": { + "latitude": { + "type": "double" + } + } + } + } + }, + "account": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "projects": { + "type": "nested", + "properties": { + "name": { + "type": "keyword" + } + } + } + } + } + } + Example: Employees ------------------ @@ -78,6 +122,71 @@ Result set:: ] } +Querying Nested Tuple Values +============================ + +Description +----------- + +Before looking into how nested object field (tuple values) be queried, we need to figure out how many cases are there and how it being handled by our SQL implementation. Therefore, first of all, let's examine different cases by the query support matrix as follows. This matrix summerizes what has been supported so far for queries with the object and nested fields involved. Note that another complexity is that any field in Elasticsearch, regular or property, can have contain more than one values in a single document. 
This makes object field not always a tuple value which needs to be handled separately. + ++-------------------------+---------------+-----------------------+---------------------------------------------+-------------------------+ +| Level/Field Type | Object Fields | Object Fields (array) | Nested Fields | Comment | ++=========================+===============+=======================+=============================================+=========================+ +| Selecting top level | Yes | Yes | Yes | The original JSON of | +| | | | | field value is returned | +| | | | | which is either a JSON | +| | | | | object or JSON array. | ++-------------------------+---------------+-----------------------+---------------------------------------------+-------------------------+ +| Selecting second level | Yes | No | Yes | | +| | | (null returned) | (or null returned if not in PartiQL syntax) | | ++-------------------------+---------------+-----------------------+---------------------------------------------+ PartiQL specification | +| Selecting deeper levels | Yes | No | No | is followed | +| | | (null returned) | (exception may | | +| | | | be thrown) | | ++-------------------------+---------------+-----------------------+---------------------------------------------+-------------------------+ + +Example 1: Selecting Top Level +------------------------------ + +Selecting top level for object fields, object fields of array value and nested fields returns original JSON object or array of the field. 
For example, object field ``city`` is a JSON object, object field (of array value) ``accounts`` and nested field ``projects`` are JSON arrays:: + + od> SELECT city, accounts, projects FROM people; + fetched rows / total rows = 1/1 + +-----------------------------------------------------+-----------------------+----------------------------------------------------------------------------------------------------------------+ + | city | accounts | projects | + |-----------------------------------------------------+-----------------------+----------------------------------------------------------------------------------------------------------------| + | {'name': 'Seattle', 'location': {'latitude': 10.5}} | [{'id': 1},{'id': 2}] | [{'name': 'AWS Redshift Spectrum querying'},{'name': 'AWS Redshift security'},{'name': 'AWS Aurora security'}] | + +-----------------------------------------------------+-----------------------+----------------------------------------------------------------------------------------------------------------+ + +Example 2: Selecting Deeper Levels +---------------------------------- + +Selecting at deeper levels for object fields of regular value returns inner field value. For example, ``city.location`` is an inner object field and ``city.location.altitude`` is a regular double field:: + + od> SELECT city.location, city.location.latitude FROM people; + fetched rows / total rows = 1/1 + +--------------------+--------------------------+ + | city.location | city.location.latitude | + |--------------------+--------------------------| + | {'latitude': 10.5} | 10.5 | + +--------------------+--------------------------+ + +Example 3: Selecting Field of Array Value +----------------------------------------- + +Select deeper level for object fields of array value which returns ``NULL``. For example, because inner field ``accounts.id`` has three values instead of a tuple in this document, null is returned. 
Similarly, selecting inner field ``projects.name`` directly in nested field returns null:: + + od> SELECT accounts.id, projects.name FROM people; + fetched rows / total rows = 1/1 + +---------------+-----------------+ + | accounts.id | projects.name | + |---------------+-----------------| + | null | null | + +---------------+-----------------+ + +For selecting second level for nested fields, please read on and find more details in the following sections. + Querying Nested Collection ========================== diff --git a/docs/user/dql/basics.rst b/docs/user/dql/basics.rst index 3b7d0c4746..082abe9aee 100644 --- a/docs/user/dql/basics.rst +++ b/docs/user/dql/basics.rst @@ -28,6 +28,8 @@ The syntax of ``SELECT`` statement is as follows:: [ORDER BY expression [IS [NOT] NULL] [ASC | DESC] [, ...]] [LIMIT [offset, ] size] +Although multiple query statements to execute in batch is not supported, ending with semicolon ``;`` is still allowed. For example, you can run ``SELECT * FROM accounts;`` without issue. This is useful to support queries generated by other tool, such as Microsoft Excel or BI tool. + Fundamentals ------------ diff --git a/docs/user/dql/expressions.rst b/docs/user/dql/expressions.rst index c81f6ee6f7..dca1c6463b 100644 --- a/docs/user/dql/expressions.rst +++ b/docs/user/dql/expressions.rst @@ -14,7 +14,7 @@ Introduction Expressions, particularly value expressions, are those which return a scalar value. Expressions have different types and forms. For example, there are literal values as atom expression and arithmetic, predicate and function expression built on top of them. And also expressions can be used in different clauses, such as using arithmetic expression in ``SELECT``, ``WHERE`` or ``HAVING`` clause. -Note that before you try out examples using the SQL features in this doc, you need to enable the new query engine by following the steps in ``opendistro.sql.engine.new.enabled`` section in `Plugin Settings `_. 
+Note that before you try out examples using the SQL features in this doc, you need to enable the new query engine by following the steps in ``opendistro.sql.engine.new.enabled`` section in `Plugin Settings <../admin/settings.rst>`_. Literal Values ============== @@ -27,28 +27,30 @@ A literal is a symbol that represents a value. The most common literal values in 1. Numeric literals: specify numeric values such as integer and floating-point numbers. 2. String literals: specify a string enclosed by single or double quotes. 3. Boolean literals: ``true`` or ``false``. +4. Date and Time literals: DATE 'YYYY-MM-DD' represent the date, TIME 'hh:mm:ss' represent the time, TIMESTAMP 'YYYY-MM-DD hh:mm:ss' represent the timestamp. Examples -------- Here is an example for different type of literals:: - od> SELECT 123, 'hello', false, -4.567; + od> SELECT 123, 'hello', false, -4.567, DATE '2020-07-07', TIME '01:01:01', TIMESTAMP '2020-07-07 01:01:01'; fetched rows / total rows = 1/1 - +-------+-----------+---------+----------+ - | 123 | "hello" | false | -4.567 | - |-------+-----------+---------+----------| - | 123 | hello | False | -4.567 | - +-------+-----------+---------+----------+ + +-------+-----------+---------+----------+---------------------+-------------------+-----------------------------------+ + | 123 | "hello" | false | -4.567 | DATE '2020-07-07' | TIME '01:01:01' | TIMESTAMP '2020-07-07 01:01:01' | + |-------+-----------+---------+----------+---------------------+-------------------+-----------------------------------| + | 123 | hello | False | -4.567 | 2020-07-07 | 01:01:01 | 2020-07-07 01:01:01 | + +-------+-----------+---------+----------+---------------------+-------------------+-----------------------------------+ Limitations ----------- The current implementation has the following limitations at the moment: -1. Only literals of data types listed as above are supported for now. Other type of literals, such as date and NULL, will be added in future. +1. 
Only literals of data types listed as above are supported for now. Other type of literals, such as NULL, will be added in future. 2. Expression of literals, such as arithmetic expressions, will be supported later. 3. Standard ANSI ``VALUES`` clause is not supported, although the ``SELECT`` literal example above is implemented by a Values operator internally. +4. Date and Time literals only support DATE_FORMAT listed above. Arithmetic Expressions ====================== @@ -111,8 +113,8 @@ Null Handling If any argument is missing or null, the final result of evaluation will be missing or null accordingly. -Examples --------- +Arithmetic function examples +---------------------------- Here is an example for different type of arithmetic expressions:: @@ -124,6 +126,19 @@ Here is an example for different type of arithmetic expressions:: | 1.234 | 5 | +---------------+---------------------+ +Date function examples +---------------------- + +Here is an example for different type of arithmetic expressions:: + + od> SELECT dayofmonth(DATE '2020-07-07'); + fetched rows / total rows = 1/1 + +---------------------------------+ + | dayofmonth(DATE '2020-07-07') | + |---------------------------------| + | 7 | + +---------------------------------+ + Limitations ----------- diff --git a/docs/user/dql/functions.rst b/docs/user/dql/functions.rst index d048cd10fb..a8d97fde7c 100644 --- a/docs/user/dql/functions.rst +++ b/docs/user/dql/functions.rst @@ -33,9 +33,21 @@ ACOS Description ----------- -Specifications: +Usage: acos(x) calculate the arc cosine of x. Returns NULL if x is not in the range -1 to 1. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE + +Example:: -1. 
ACOS(NUMBER T) -> DOUBLE + od> SELECT ACOS(0) + fetched rows / total rows = 1/1 + +--------------------+ + | acos(0) | + |--------------------| + | 1.5707963267948966 | + +--------------------+ ADD @@ -66,9 +78,21 @@ ASIN Description ----------- -Specifications: +Usage: asin(x) calculate the arc sine of x. Returns NULL if x is not in the range -1 to 1. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE -1. ASIN(NUMBER T) -> DOUBLE +Example:: + + od> SELECT ASIN(0) + fetched rows / total rows = 1/1 + +-----------+ + | asin(0) | + |-----------| + | 0 | + +-----------+ ATAN @@ -77,9 +101,21 @@ ATAN Description ----------- -Specifications: +Usage: atan(x) calculates the arc tangent of x. atan(y, x) calculates the arc tangent of y / x, except that the signs of both arguments are used to determine the quadrant of the result. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE -1. ATAN(NUMBER T) -> DOUBLE +Example:: + + od> SELECT ATAN(2), ATAN(2, 3) + fetched rows / total rows = 1/1 + +--------------------+--------------------+ + | atan(2) | atan(2, 3) | + |--------------------+--------------------| + | 1.1071487177940904 | 0.5880026035475675 | + +--------------------+--------------------+ ATAN2 @@ -88,9 +124,21 @@ ATAN2 Description ----------- -Specifications: +Usage: atan2(y, x) calculates the arc tangent of y / x, except that the signs of both arguments are used to determine the quadrant of the result. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE -1. ATAN2(NUMBER T, NUMBER) -> DOUBLE +Return type: DOUBLE + +Example:: + + od> SELECT ATAN2(2, 3) + fetched rows / total rows = 1/1 + +--------------------+ + | atan2(2, 3) | + |--------------------| + | 0.5880026035475675 | + +--------------------+ CAST @@ -139,15 +187,50 @@ Description Specification is undefined and type check is skipped for now + +CONV +==== + +Description +----------- + +Usage: CONV(x, a, b) converts the number x from a base to b base. 
+ +Argument type: x: STRING, a: INTEGER, b: INTEGER + +Return type: STRING + +Example:: + + od> SELECT CONV('12', 10, 16), CONV('2C', 16, 10), CONV(12, 10, 2), CONV(1111, 2, 10) + fetched rows / total rows = 1/1 + +----------------------+----------------------+-------------------+---------------------+ + | conv("12", 10, 16) | conv("2C", 16, 10) | conv(12, 10, 2) | conv(1111, 2, 10) | + |----------------------+----------------------+-------------------+---------------------| + | c | 44 | 1100 | 15 | + +----------------------+----------------------+-------------------+---------------------+ + COS === Description ----------- -Specifications: +Usage: cos(x) calculate the cosine of x, where x is given in radians. -1. COS(NUMBER T) -> DOUBLE +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE + +Example:: + + od> SELECT COS(0) + fetched rows / total rows = 1/1 + +----------+ + | cos(0) | + |----------| + | 1 | + +----------+ COSH @@ -167,9 +250,44 @@ COT Description ----------- -Specifications: +Usage: cot(x) calculate the cotangent of x. Returns out-of-range error if x equals to 0. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE + +Example:: + + od> SELECT COT(1) + fetched rows / total rows = 1/1 + +--------------------+ + | cot(1) | + |--------------------| + | 0.6420926159343306 | + +--------------------+ + + +CRC32 +===== + +Description +----------- -1. COT(NUMBER T) -> DOUBLE +Usage: Calculates a cyclic redundancy check value and returns a 32-bit unsigned value. + +Argument type: STRING + +Return type: LONG + +Example:: + + od> SELECT CRC32('MySQL') + fetched rows / total rows = 1/1 + +------------------+ + | crc32("MySQL") | + |------------------| + | 3259397556 | + +------------------+ CURDATE @@ -223,9 +341,21 @@ DEGREES Description ----------- -Specifications: +Usage: degrees(x) converts x from radians to degrees. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE -1. 
DEGREES(NUMBER T) -> DOUBLE +Example:: + + od> SELECT DEGREES(1.57) + fetched rows / total rows = 1/1 + +-------------------+ + | degrees(1.57) | + |-------------------| + | 89.95437383553924 | + +-------------------+ DIVIDE @@ -245,9 +375,19 @@ E Description ----------- -Specifications: +Usage: E() returns the Euler's number + +Return type: DOUBLE -1. E() -> DOUBLE +Example:: + + od> SELECT E() + fetched rows / total rows = 1/1 + +-------------------+ + | e() | + |-------------------| + | 2.718281828459045 | + +-------------------+ EXP @@ -429,15 +569,27 @@ Specifications: 1. MAKETIME(INTEGER, INTEGER, INTEGER) -> DATE -MODULUS +MOD ======= Description ----------- -Specifications: +Usage: MOD(n, m) calculates the remainder of the number n divided by m. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE -1. MODULUS(NUMBER T, NUMBER) -> T +Return type: Wider type between types of n and m if m is nonzero value. If m equals to 0, then returns NULL. + +Example:: + + od> SELECT MOD(3, 2), MOD(3.1, 2) + fetched rows / total rows = 1/1 + +-------------+---------------+ + | mod(3, 2) | mod(3.1, 2) | + |-------------+---------------| + | 1 | 1.1 | + +-------------+---------------+ MONTH @@ -490,9 +642,19 @@ PI Description ----------- -Specifications: +Usage: PI() returns the constant pi + +Return type: DOUBLE -1. PI() -> DOUBLE +Example:: + + od> SELECT PI() + fetched rows / total rows = 1/1 + +-------------------+ + | pi() | + |-------------------| + | 3.141592653589793 | + +-------------------+ POW @@ -501,10 +663,21 @@ POW Description ----------- -Specifications: +Usage: POW(x, y) calculates the value of x raised to the power of y. Bad inputs return NULL result. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE -1. POW(NUMBER T) -> T -2. 
POW(NUMBER T, NUMBER) -> T +Return type: DOUBLE + +Example:: + + od> SELECT POW(3, 2), POW(-3, 2), POW(3, -2) + fetched rows / total rows = 1/1 + +-------------+--------------+--------------------+ + | pow(3, 2) | pow(-3, 2) | pow(3, -2) | + |-------------+--------------+--------------------| + | 9 | 9 | 0.1111111111111111 | + +-------------+--------------+--------------------+ POWER @@ -513,10 +686,21 @@ POWER Description ----------- -Specifications: +Usage: POWER(x, y) calculates the value of x raised to the power of y. Bad inputs return NULL result. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE -1. POWER(NUMBER T) -> T -2. POWER(NUMBER T, NUMBER) -> T +Return type: DOUBLE + +Example:: + + od> SELECT POWER(3, 2), POWER(-3, 2), POWER(3, -2) + fetched rows / total rows = 1/1 + +---------------+----------------+--------------------+ + | power(3, 2) | power(-3, 2) | power(3, -2) | + |---------------+----------------+--------------------| + | 9 | 9 | 0.1111111111111111 | + +---------------+----------------+--------------------+ RADIANS @@ -525,9 +709,21 @@ RADIANS Description ----------- -Specifications: +Usage: radians(x) converts x from degrees to radians. -1. RADIANS(NUMBER T) -> DOUBLE +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE + +Example:: + + od> SELECT RADIANS(90) + fetched rows / total rows = 1/1 + +--------------------+ + | radians(90) | + |--------------------| + | 1.5707963267948966 | + +--------------------+ RAND @@ -536,10 +732,21 @@ RAND Description ----------- -Specifications: +Usage: RAND()/RAND(N) returns a random floating-point value in the range 0 <= value < 1.0. If integer N is specified, the seed is initialized prior to execution. One implication of this behavior is with identical argument N, rand(N) returns the same value each time, and thus produces a repeatable sequence of column values. + +Argument type: INTEGER + +Return type: FLOAT + +Example:: -1. RAND() -> NUMBER -2. 
RAND(NUMBER T) -> T + od> SELECT RAND(3) + fetched rows / total rows = 1/1 + +------------+ + | rand(3) | + |------------| + | 0.73105735 | + +------------+ REPLACE @@ -581,9 +788,24 @@ ROUND Description ----------- -Specifications: +Usage: ROUND(x, d) rounds the argument x to d decimal places, d defaults to 0 if not specified + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type map: + +(INTEGER/LONG [,INTEGER]) -> LONG +(FLOAT/DOUBLE [,INTEGER]) -> LONG -1. ROUND(NUMBER T) -> T +Example:: + + od> SELECT ROUND(12.34), ROUND(12.34, 1), ROUND(12.34, -1), ROUND(12, 1) + fetched rows / total rows = 1/1 + +----------------+-------------------+--------------------+----------------+ + | round(12.34) | round(12.34, 1) | round(12.34, -1) | round(12, 1) | + |----------------+-------------------+--------------------+----------------| + | 12 | 12.3 | 10 | 12 | + +----------------+-------------------+--------------------+----------------+ RTRIM @@ -603,9 +825,21 @@ SIGN Description ----------- -Specifications: +Usage: Returns the sign of the argument as -1, 0, or 1, depending on whether the number is negative, zero, or positive + +Argument type: INTEGER/LONG/FLOAT/DOUBLE -1. SIGN(NUMBER T) -> T +Return type: INTEGER + +Example:: + + od> SELECT SIGN(1), SIGN(0), SIGN(-1.1) + fetched rows / total rows = 1/1 + +-----------+-----------+--------------+ + | sign(1) | sign(0) | sign(-1.1) | + |-----------+-----------+--------------| + | 1 | 0 | -1 | + +-----------+-----------+--------------+ SIGNUM @@ -625,9 +859,21 @@ SIN Description ----------- -Specifications: +Usage: sin(x) calculate the sine of x, where x is given in radians. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE -1. 
SIN(NUMBER T) -> DOUBLE +Return type: DOUBLE + +Example:: + + od> SELECT SIN(0) + fetched rows / total rows = 1/1 + +----------+ + | sin(0) | + |----------| + | 0 | + +----------+ SINH @@ -647,9 +893,24 @@ SQRT Description ----------- -Specifications: +Usage: Calculates the square root of a non-negative number -1. SQRT(NUMBER T) -> T +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type map: + +(Non-negative) INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE +(Negative) INTEGER/LONG/FLOAT/DOUBLE -> NULL + +Example:: + + od> SELECT SQRT(4), SQRT(4.41) + fetched rows / total rows = 1/1 + +-----------+--------------+ + | sqrt(4) | sqrt(4.41) | + |-----------+--------------| + | 2 | 2.1 | + +-----------+--------------+ SUBSTRING @@ -680,9 +941,21 @@ TAN Description ----------- -Specifications: +Usage: tan(x) calculate the tangent of x, where x is given in radians. + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type: DOUBLE -1. TAN(NUMBER T) -> DOUBLE +Example:: + + od> SELECT TAN(0) + fetched rows / total rows = 1/1 + +----------+ + | tan(0) | + |----------| + | 0 | + +----------+ TIMESTAMP @@ -707,6 +980,31 @@ Specifications: 1. TRIM(STRING T) -> T +TRUNCATE +======== + +Description +----------- + +Usage: TRUNCATE(x, d) returns the number x, truncated to d decimal place + +Argument type: INTEGER/LONG/FLOAT/DOUBLE + +Return type map: + +INTEGER/LONG -> LONG +FLOAT/DOUBLE -> DOUBLE + +Example:: + + fetched rows / total rows = 1/1 + +----------------------+-----------------------+-------------------+ + | truncate(56.78, 1) | truncate(56.78, -1) | truncate(56, 1) | + |----------------------+-----------------------+-------------------| + | 56.7 | 50 | 56 | + +----------------------+-----------------------+-------------------+ + + UPPER ===== diff --git a/docs/user/general/datatype.rst b/docs/user/general/datatype.rst new file mode 100644 index 0000000000..4ebdf6f56a --- /dev/null +++ b/docs/user/general/datatype.rst @@ -0,0 +1,74 @@ +========== +Data Types +========== + +.. 
rubric:: Table of contents + +.. contents:: + :local: + :depth: 2 + + +ODFE SQL Data Types +=================== + +The ODFE SQL Engine supports the following data types. + ++---------------+ +| ODFE SQL Type | ++===============+ +| boolean | ++---------------+ +| integer | ++---------------+ +| long | ++---------------+ +| float | ++---------------+ +| double | ++---------------+ +| string | ++---------------+ +| text | ++---------------+ +| timestamp | ++---------------+ +| date | ++---------------+ +| time | ++---------------+ +| struct | ++---------------+ +| array | ++---------------+ + +Data Types Mapping +================== + +The table below lists the mapping between Elasticsearch Data Type, ODFE SQL Data Type and SQL Type. + ++--------------------+---------------+-----------+ +| Elasticsearch Type | ODFE SQL Type | SQL Type | ++====================+===============+===========+ +| boolean | boolean | BOOLEAN | ++--------------------+---------------+-----------+ +| integer | integer | INTEGER | ++--------------------+---------------+-----------+ +| long | long | LONG | ++--------------------+---------------+-----------+ +| float | float | FLOAT | ++--------------------+---------------+-----------+ +| double | double | DOUBLE | ++--------------------+---------------+-----------+ +| keyword | string | VARCHAR | ++--------------------+---------------+-----------+ +| text | text | VARCHAR | ++--------------------+---------------+-----------+ +| date | timestamp | TIMESTAMP | ++--------------------+---------------+-----------+ +| object | struct | STRUCT | ++--------------------+---------------+-----------+ +| nested | array | TBD | ++--------------------+---------------+-----------+ + +Notes: Not every ODFE SQL type has a corresponding Elasticsearch type, e.g. date and time. To use a function which requires such a data type, the user should explicitly convert the data type. 
diff --git a/docs/user/general/identifiers.rst b/docs/user/general/identifiers.rst new file mode 100644 index 0000000000..5dbee26131 --- /dev/null +++ b/docs/user/general/identifiers.rst @@ -0,0 +1,108 @@ +=========== +Identifiers +=========== + +.. rubric:: Table of contents + +.. contents:: + :local: + :depth: 2 + + +Introduction +============ + +Identifiers are used for naming your database objects, such as index name, field name, alias etc. Basically there are two types of identifiers: regular identifiers and delimited identifiers. + + +Regular Identifiers +=================== + +Description +----------- + +According to ANSI SQL standard, a regular identifier is a string of characters that must start with ASCII letter (lower or upper case). The subsequent character can be a combination of letter, digit, underscore (``_``). It cannot be a reserved keyword. And whitespace and other special characters are not allowed. Additionally in our SQL parser, we make extensions to the rule for Elasticsearch storage as shown in next sub-section. + +Extensions +---------- + +For Elasticsearch, the following identifiers are supported extensionally by our SQL parser for convenience (without the need of being delimited as shown in next section): + +1. Identifiers prefixed by dot ``.``: this is called hidden index in Elasticsearch, for example ``.kibana``. +2. Identifiers prefixed by at sign ``@``: this is common for meta fields generated in Logstash ingestion. +3. Identifiers with ``-`` in the middle: this is mostly the case for index name with date information. +4. Identifiers with star ``*`` present: this is mostly an index pattern for wildcard match. 
+ +Examples +-------- + +Here are examples for using index pattern directly without quotes:: + + od> SELECT * FROM *cc*nt*; + fetched rows / total rows = 4/4 + +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | + |------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------| + | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | + | 6 | 5686 | Hattie | Bond | 36 | M | 671 Bristol Street | Netagy | hattiebond@netagy.com | Dante | TN | + | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | + | 18 | 4180 | Dale | Adams | 33 | M | 467 Hutchinson Court | null | daleadams@boink.com | Orick | MD | + +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + + +Delimited Identifiers +===================== + +Description +----------- + +A delimited identifier is an identifier enclosed in back ticks ````` or double quotation marks ``"``. In this case, the identifier enclosed is not necessarily a regular identifier. In other words, it can contain any special character not allowed by regular identifier. + +Please note the difference between single quote and double quotes in SQL syntax. Single quote is used to enclose a string literal while double quotes have same purpose as back ticks to escape special characters in an identifier. + +Use Cases +--------- + +Here are typical examples of the use of delimited identifiers: + +1. Identifiers of reserved key word name +2. 
Identifiers with dot ``.`` present: similarly as ``-`` in index name to include date information, it is required to be quoted so parser can differentiate it from identifier with qualifiers. +3. Identifiers with other special character: Elasticsearch has its own rule which allows more special character, for example Unicode character is supported in index name. + +Examples +-------- + +Here are examples for quoting an index name by back ticks:: + + od> SELECT * FROM `accounts`; + fetched rows / total rows = 4/4 + +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + | account_number | balance | firstname | lastname | age | gender | address | employer | email | city | state | + |------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------| + | 1 | 39225 | Amber | Duke | 32 | M | 880 Holmes Lane | Pyrami | amberduke@pyrami.com | Brogan | IL | + | 6 | 5686 | Hattie | Bond | 36 | M | 671 Bristol Street | Netagy | hattiebond@netagy.com | Dante | TN | + | 13 | 32838 | Nanette | Bates | 28 | F | 789 Madison Street | Quility | null | Nogal | VA | + | 18 | 4180 | Dale | Adams | 33 | M | 467 Hutchinson Court | null | daleadams@boink.com | Orick | MD | + +------------------+-----------+-------------+------------+-------+----------+----------------------+------------+-----------------------+--------+---------+ + + +Case Sensitivity +================ + +Description +----------- + +In SQL-92, regular identifiers are case insensitive and converted to upper case automatically just like key word. While characters in a delimited identifier appear as they are. However, in our SQL implementation, identifiers are treated in case sensitive manner. So it must be exactly same as what is stored in Elasticsearch which is different from ANSI standard. 
+ +Examples +-------- + +For example, if you run ``SELECT * FROM ACCOUNTS``, it will end up with an index not found exception from our plugin because the actual index name is under lower case. + + +Identifier Qualifiers +===================== + +For now, we do not support using Elasticsearch cluster name as catalog name to qualify an index name, such as ``my-cluster.logs``. + +TODO: field name qualifiers diff --git a/docs/user/index.rst b/docs/user/index.rst index c3562c305c..d5f668e9f3 100644 --- a/docs/user/index.rst +++ b/docs/user/index.rst @@ -17,6 +17,11 @@ Open Distro for Elasticsearch SQL enables you to extract insights out of Elastic - `Plugin Settings `_ +* **Language Structure** + + - `Identifiers `_ + - `Data Types `_ + * **Data Query Language** - `Expressions `_ diff --git a/doctest/test_data/people.json b/doctest/test_data/people.json new file mode 100644 index 0000000000..2f56bfb7c1 --- /dev/null +++ b/doctest/test_data/people.json @@ -0,0 +1 @@ +{"city": {"name": "Seattle", "location": {"latitude": 10.5}}, "accounts": [{"id": 1}, {"id": 2}], "projects": [{"name": "AWS Redshift Spectrum querying"}, {"name": "AWS Redshift security"}, {"name": "AWS Aurora security"}] } diff --git a/doctest/test_docs.py b/doctest/test_docs.py index 05829addbb..d73554b615 100644 --- a/doctest/test_docs.py +++ b/doctest/test_docs.py @@ -13,6 +13,7 @@ import doctest import os +import os.path import zc.customdoctests import json import re @@ -30,6 +31,7 @@ ENDPOINT = "http://localhost:9200" ACCOUNTS = "accounts" EMPLOYEES = "employees" +PEOPLE = "people" class DocTestConnection(ESConnection): @@ -88,17 +90,23 @@ def bash_transform(s): ps1=r'sh\$', comment_prefix='#', transform=bash_transform) -def set_up_accounts(test): +def set_up_test_indices(test): set_up(test) load_file("accounts.json", index_name=ACCOUNTS) + load_file("people.json", index_name=PEOPLE) def load_file(filename, index_name): - filepath = "./test_data/" + filename + # Create index with the mapping if 
mapping file exists + mapping_file_path = './test_mapping/' + filename + if os.path.isfile(mapping_file_path): + with open(mapping_file_path, 'r') as f: + test_data_client.indices.create(index=index_name, body=f.read()) # generate iterable data + data_file_path = './test_data/' + filename def load_json(): - with open(filepath, "r") as f: + with open(data_file_path, 'r') as f: for line in f: yield json.loads(line) @@ -114,7 +122,7 @@ def set_up(test): def tear_down(test): # drop leftover tables after each test - test_data_client.indices.delete(index=[ACCOUNTS, EMPLOYEES], ignore_unavailable=True) + test_data_client.indices.delete(index=[ACCOUNTS, EMPLOYEES, PEOPLE], ignore_unavailable=True) docsuite = partial(doctest.DocFileSuite, @@ -140,7 +148,7 @@ def doc_suite(fn): return docsuite( fn, parser=bash_parser, - setUp=set_up_accounts, + setUp=set_up_test_indices, globs={ 'sh': partial( subprocess.run, @@ -178,7 +186,7 @@ def load_tests(loader, suite, ignore): docsuite( fn, parser=sql_cli_parser, - setUp=set_up_accounts + setUp=set_up_test_indices ) ) @@ -188,7 +196,7 @@ def load_tests(loader, suite, ignore): docsuite( fn, parser=ppl_cli_parser, - setUp=set_up_accounts + setUp=set_up_test_indices ) ) diff --git a/doctest/test_mapping/people.json b/doctest/test_mapping/people.json new file mode 100644 index 0000000000..6c4ef1f7b2 --- /dev/null +++ b/doctest/test_mapping/people.json @@ -0,0 +1,35 @@ +{ + "mappings": { + "properties": { + "city": { + "properties": { + "name": { + "type": "keyword" + }, + "location": { + "properties": { + "latitude": { + "type": "double" + } + } + } + } + }, + "account": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "projects": { + "type": "nested", + "properties": { + "name": { + "type": "keyword" + } + } + } + } + } +} \ No newline at end of file diff --git a/elasticsearch/build.gradle b/elasticsearch/build.gradle index b8407262ef..9adb955957 100644 --- a/elasticsearch/build.gradle +++ b/elasticsearch/build.gradle @@ 
-11,13 +11,16 @@ repositories { dependencies { compile project(':core') compile group: 'org.elasticsearch', name: 'elasticsearch', version: "${es_version}" - compile group: 'org.elasticsearch.client', name: 'elasticsearch-rest-high-level-client', version: "${es_version}" compile "io.github.resilience4j:resilience4j-retry:1.5.0" + compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.10.4' + compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.4' + compileOnly group: 'org.elasticsearch.client', name: 'elasticsearch-rest-high-level-client', version: "${es_version}" testImplementation('org.junit.jupiter:junit-jupiter:5.6.2') testCompile group: 'org.hamcrest', name: 'hamcrest-library', version: '2.1' testCompile group: 'org.mockito', name: 'mockito-core', version: '3.3.3' testCompile group: 'org.mockito', name: 'mockito-junit-jupiter', version: '3.3.3' + testCompile group: 'org.elasticsearch.client', name: 'elasticsearch-rest-high-level-client', version: "${es_version}" } test { diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/type/ElasticsearchDataType.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/type/ElasticsearchDataType.java new file mode 100644 index 0000000000..cb2a277356 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/type/ElasticsearchDataType.java @@ -0,0 +1,63 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; + +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import lombok.RequiredArgsConstructor; + +/** + * The extension of ExprType in Elasticsearch. + */ +@RequiredArgsConstructor +public enum ElasticsearchDataType implements ExprType { + /** + * Elasticsearch Text. + * Ref: https://www.elastic.co/guide/en/elasticsearch/reference/current/text.html + */ + ES_TEXT(Collections.singletonList(STRING), "string"), + + /** + * Elasticsearch multi-fields which has text and keyword. + * Ref: https://www.elastic.co/guide/en/elasticsearch/reference/current/multi-fields.html + */ + ES_TEXT_KEYWORD(Arrays.asList(STRING, ES_TEXT), "string"); + + /** + * Parent of current type. + */ + private final List parents; + /** + * JDBC type name. + */ + private final String jdbcType; + + @Override + public List getParent() { + return parents; + } + + @Override + public String typeName() { + return jdbcType; + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchDateFormatters.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchDateFormatters.java new file mode 100644 index 0000000000..cfd09f6ad3 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchDateFormatters.java @@ -0,0 +1,97 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static java.time.temporal.ChronoField.DAY_OF_MONTH; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.MONTH_OF_YEAR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; + +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.ResolverStyle; +import java.time.format.SignStyle; +import java.time.temporal.ChronoField; +import java.util.Locale; +import lombok.experimental.UtilityClass; + +/** + * DateTimeFormatter. + * Reference org.elasticsearch.common.time.DateFormatters. 
+ */ +@UtilityClass +public class ElasticsearchDateFormatters { + + public static final DateTimeFormatter TIME_ZONE_FORMATTER_NO_COLON = + new DateTimeFormatterBuilder() + .appendOffset("+HHmm", "Z") + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + public static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .optionalStart() + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + public static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER = + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .optionalStart() + .appendLiteral(',') + .appendFraction(NANO_OF_SECOND, 1, 9, false) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_NO_COLON) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = DateTimeFormatter + .ofPattern("yyyy-MM-dd HH:mm:ss"); +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextValue.java 
b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextValue.java new file mode 100644 index 0000000000..927b62bbd2 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextValue.java @@ -0,0 +1,37 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprStringValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; + +/** + * Expression Text Value, it is a extension of the ExprValue by Elasticsearch. 
+ */ +public class ElasticsearchExprTextValue extends ExprStringValue { + public ElasticsearchExprTextValue(String value) { + super(value); + } + + @Override + public ExprType type() { + return ES_TEXT; + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java new file mode 100644 index 0000000000..82832ac034 --- /dev/null +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactory.java @@ -0,0 +1,206 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.nullValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.ARRAY; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchDateFormatters.SQL_LITERAL_DATE_TIME_FORMAT; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchDateFormatters.STRICT_DATE_OPTIONAL_TIME_FORMATTER; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprCollectionValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprDoubleValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprFloatValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprIntegerValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprLongValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprStringValue; +import 
com.amazon.opendistroforelasticsearch.sql.data.model.ExprTimestampValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import java.time.Instant; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import lombok.RequiredArgsConstructor; +import org.elasticsearch.common.time.DateFormatters; + +/** Construct ExprValue from Elasticsearch response. */ +@RequiredArgsConstructor +public class ElasticsearchExprValueFactory { + /** The Mapping of Field and ExprType. */ + private final Map typeMapping; + + private static final DateTimeFormatter DATE_TIME_FORMATTER = + new DateTimeFormatterBuilder() + .appendOptional(SQL_LITERAL_DATE_TIME_FORMAT) + .appendOptional(STRICT_DATE_OPTIONAL_TIME_FORMATTER) + .toFormatter(); + + private static final String TOP_PATH = ""; + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + /** + * The struct construction has the following assumption. 1. The field has Elasticsearch Object + * data type. https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html 2. The + * deeper field is flattened in the typeMapping. e.g. 
{"employ", "STRUCT"} {"employ.id", + * "INTEGER"} {"employ.state", "STRING"} + */ + public ExprTupleValue construct(String jsonString) { + try { + return constructStruct(OBJECT_MAPPER.readTree(jsonString), TOP_PATH); + } catch (JsonProcessingException e) { + throw new IllegalStateException(String.format("invalid json: %s.", jsonString), e); + } + } + + /** Construct ExprValue from field and value pair. */ + private ExprValue construct(String field, JsonNode value) { + if (value.isNull()) { + return nullValue(); + } + + ExprType type = type(field); + if (type.equals(INTEGER)) { + return constructInteger(value); + } else if (type.equals(LONG)) { + return constructLong(value); + } else if (type.equals(FLOAT)) { + return constructFloat(value); + } else if (type.equals(DOUBLE)) { + return constructDouble(value); + } else if (type.equals(STRING)) { + return constructString(value); + } else if (type.equals(BOOLEAN)) { + return constructBoolean(value); + } else if (type.equals(STRUCT)) { + return constructStruct(value, field); + } else if (type.equals(ARRAY)) { + return constructArray(value, field); + } else if (type.equals(TIMESTAMP)) { + return constructTimestamp(value); + } else if (type.equals(ES_TEXT)) { + return new ElasticsearchExprTextValue(value.asText()); + } else { + throw new IllegalStateException( + String.format( + "Unsupported type: %s for field: %s, value: %s.", type.typeName(), field, value)); + } + } + + private ExprType type(String field) { + if (typeMapping.containsKey(field)) { + return typeMapping.get(field); + } else { + throw new IllegalStateException(String.format("No type found for field: %s.", field)); + } + } + + private ExprIntegerValue constructInteger(JsonNode value) { + return new ExprIntegerValue(value.intValue()); + } + + private ExprLongValue constructLong(JsonNode value) { + return new ExprLongValue(value.longValue()); + } + + private ExprFloatValue constructFloat(JsonNode value) { + return new ExprFloatValue(value.floatValue()); + } + + 
private ExprDoubleValue constructDouble(JsonNode value) { + return new ExprDoubleValue(value.doubleValue()); + } + + private ExprStringValue constructString(JsonNode value) { + return new ExprStringValue(value.textValue()); + } + + private ExprBooleanValue constructBoolean(JsonNode value) { + return ExprBooleanValue.of(value.booleanValue()); + } + + /** + * Only default strict_date_optional_time||epoch_millis is supported. + * https://www.elastic.co/guide/en/elasticsearch/reference/current/date.html + * The customized date_format is not supported. + */ + private ExprValue constructTimestamp(JsonNode value) { + try { + if (value.getNodeType().equals(JsonNodeType.NUMBER)) { + return new ExprTimestampValue(Instant.ofEpochMilli(value.asLong())); + } else { + return new ExprTimestampValue( + // Using Elasticsearch DateFormatters for now. + DateFormatters.from(DATE_TIME_FORMATTER.parse(value.asText())).toInstant()); + } + } catch (DateTimeParseException e) { + throw new IllegalStateException( + String.format( + "Construct ExprTimestampValue from %s failed, unsupported date format.", value), + e); + } + } + + private ExprTupleValue constructStruct(JsonNode value, String path) { + LinkedHashMap map = new LinkedHashMap<>(); + value + .fieldNames() + .forEachRemaining( + field -> map.put(field, construct(makeField(path, field), value.get(field)))); + return new ExprTupleValue(map); + } + + /** + * Todo. ARRAY is not support now. In Elasticsearch, there is no dedicated array data type. + * https://www.elastic.co/guide/en/elasticsearch/reference/current/array.html. The similar data + * type is nested, but it can only allow a list of objects. 
+ */ + private ExprCollectionValue constructArray(JsonNode value, String path) { + List list = new ArrayList<>(); + value + .elements() + .forEachRemaining( + node -> { + list.add(constructStruct(node, path)); + }); + return new ExprCollectionValue(list); + } + + private String makeField(String path, String field) { + return path.equalsIgnoreCase(TOP_PATH) ? field : String.join(".", path, field); + } +} diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java index 14a082ee0e..accc3bf52d 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndex.java @@ -19,6 +19,8 @@ import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.mapping.IndexMapping; import com.amazon.opendistroforelasticsearch.sql.planner.DefaultImplementor; import com.amazon.opendistroforelasticsearch.sql.planner.logical.LogicalPlan; @@ -36,11 +38,11 @@ public class ElasticsearchIndex implements Table { /** * Type mapping from Elasticsearch data type to expression type in our type system in query - * engine. TODO: date, geo, ip etc. + * engine. TODO: geo, ip etc. 
*/ - private static final Map ES_TYPE_TO_EXPR_TYPE_MAPPING = - ImmutableMap.builder() - .put("text", ExprCoreType.STRING) + private static final Map ES_TYPE_TO_EXPR_TYPE_MAPPING = + ImmutableMap.builder() + .put("text", ElasticsearchDataType.ES_TEXT) .put("keyword", ExprCoreType.STRING) .put("integer", ExprCoreType.INTEGER) .put("long", ExprCoreType.LONG) @@ -49,6 +51,7 @@ public class ElasticsearchIndex implements Table { .put("boolean", ExprCoreType.BOOLEAN) .put("nested", ExprCoreType.ARRAY) .put("object", ExprCoreType.STRUCT) + .put("date", ExprCoreType.TIMESTAMP) .build(); /** Elasticsearch client connection. */ @@ -75,7 +78,8 @@ public Map getFieldTypes() { /** TODO: Push down operations to index scan operator as much as possible in future. */ @Override public PhysicalPlan implement(LogicalPlan plan) { - ElasticsearchIndexScan indexScan = new ElasticsearchIndexScan(client, indexName); + ElasticsearchIndexScan indexScan = new ElasticsearchIndexScan(client, indexName, + new ElasticsearchExprValueFactory(getFieldTypes())); /* * Visit logical plan with index scan as context so logical operators visited, such as @@ -91,7 +95,7 @@ public PhysicalPlan visitRelation(LogicalRelation node, ElasticsearchIndexScan c indexScan); } - private ExprCoreType transformESTypeToExprType(String esType) { + private ExprType transformESTypeToExprType(String esType) { return ES_TYPE_TO_EXPR_TYPE_MAPPING.getOrDefault(esType, ExprCoreType.UNKNOWN); } } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java index e261782700..bf140b5461 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScan.java 
@@ -17,8 +17,8 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; -import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.request.ElasticsearchRequest; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; import com.amazon.opendistroforelasticsearch.sql.storage.TableScanOperator; @@ -38,15 +38,24 @@ public class ElasticsearchIndexScan extends TableScanOperator { /** Elasticsearch client. */ private final ElasticsearchClient client; + private final ElasticsearchExprValueFactory exprValueFactory; + /** Search request. */ - @EqualsAndHashCode.Include @ToString.Include private final ElasticsearchRequest request; + @EqualsAndHashCode.Include + @ToString.Include + private final ElasticsearchRequest request; /** Search response for current batch. */ private Iterator hits; - public ElasticsearchIndexScan(ElasticsearchClient client, String indexName) { + /** + * Todo. 
+ */ + public ElasticsearchIndexScan(ElasticsearchClient client, String indexName, + ElasticsearchExprValueFactory exprValueFactory) { this.client = client; this.request = new ElasticsearchRequest(indexName); + this.exprValueFactory = exprValueFactory; } @Override @@ -70,7 +79,7 @@ public boolean hasNext() { @Override public ExprValue next() { - return ExprValueUtils.fromObjectValue(hits.next().getSourceAsMap()); + return exprValueFactory.construct(hits.next().getSourceAsString()); } @Override diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/type/ElasticsearchDataTypeTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/type/ElasticsearchDataTypeTest.java new file mode 100644 index 0000000000..7b04afbe56 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/type/ElasticsearchDataTypeTest.java @@ -0,0 +1,44 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type; + +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT_KEYWORD; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +class ElasticsearchDataTypeTest { + @Test + public void testIsCompatible() { + assertTrue(STRING.isCompatible(ES_TEXT)); + assertFalse(ES_TEXT.isCompatible(STRING)); + + assertTrue(STRING.isCompatible(ES_TEXT_KEYWORD)); + assertTrue(ES_TEXT.isCompatible(ES_TEXT_KEYWORD)); + } + + @Test + public void testTypeName() { + assertEquals("string", ES_TEXT.typeName()); + assertEquals("string", ES_TEXT_KEYWORD.typeName()); + } +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextValueTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextValueTest.java new file mode 100644 index 0000000000..2336cc9671 --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprTextValueTest.java @@ -0,0 +1,30 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +class ElasticsearchExprTextValueTest { + @Test + public void typeOfExprTextValue() { + assertEquals(ES_TEXT, new ElasticsearchExprTextValue("A").type()); + } +} \ No newline at end of file diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java new file mode 100644 index 0000000000..f83b323d3f --- /dev/null +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/data/value/ElasticsearchExprValueFactoryTest.java @@ -0,0 +1,209 @@ +/* + * + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value; + +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.booleanValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.doubleValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.floatValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.integerValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.longValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.nullValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.stringValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.ARRAY; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.DOUBLE; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.FLOAT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.TIMESTAMP; +import static com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType.ES_TEXT; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprCollectionValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTimestampValue; +import 
com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.time.Instant; +import java.util.LinkedHashMap; +import java.util.Map; +import lombok.EqualsAndHashCode; +import lombok.ToString; +import org.junit.jupiter.api.Test; + +class ElasticsearchExprValueFactoryTest { + + private static final Map MAPPING = + new ImmutableMap.Builder() + .put("intV", INTEGER) + .put("longV", LONG) + .put("floatV", FLOAT) + .put("doubleV", DOUBLE) + .put("stringV", STRING) + .put("dateV", TIMESTAMP) + .put("boolV", BOOLEAN) + .put("structV", STRUCT) + .put("structV.id", INTEGER) + .put("structV.state", STRING) + .put("arrayV", ARRAY) + .put("arrayV.info", STRING) + .put("arrayV.author", STRING) + .put("textV", ES_TEXT) + .build(); + private ElasticsearchExprValueFactory exprValueFactory = + new ElasticsearchExprValueFactory(MAPPING); + + @Test + public void constructNullValue() { + assertEquals(nullValue(), tupleValue("{\"intV\":null}").get("intV")); + } + + @Test + public void constructInteger() { + assertEquals(integerValue(1), tupleValue("{\"intV\":1}").get("intV")); + } + + @Test + public void constructLong() { + assertEquals(longValue(1L), tupleValue("{\"longV\":1}").get("longV")); + } + + @Test + public void constructFloat() { + assertEquals(floatValue(1f), tupleValue("{\"floatV\":1.0}").get("floatV")); + } + + @Test + public void constructDouble() { + assertEquals(doubleValue(1d), tupleValue("{\"doubleV\":1.0}").get("doubleV")); + } + + @Test + public void constructString() { + assertEquals(stringValue("text"), tupleValue("{\"stringV\":\"text\"}").get("stringV")); + } + + @Test + public void constructBoolean() { + assertEquals(booleanValue(true), tupleValue("{\"boolV\":true}").get("boolV")); + } + + @Test + public void 
constructText() { + assertEquals(new ElasticsearchExprTextValue("text"), tupleValue("{\"textV\":\"text\"}").get( + "textV")); + } + + @Test + public void constructDate() { + assertEquals( + new ExprTimestampValue("2015-01-01 00:00:00"), + tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV")); + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"dateV\":\"2015-01-01T12:10:30Z\"}").get("dateV")); + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"dateV\":\"2015-01-01T12:10:30\"}").get("dateV")); + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"dateV\":\"2015-01-01 12:10:30\"}").get("dateV")); + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + tupleValue("{\"dateV\":1420070400001}").get("dateV")); + } + + @Test + public void constructDateFromUnsupportedFormatThrowException() { + IllegalStateException exception = + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"dateV\":\"2015-01-01 12:10\"}")); + assertEquals( + "Construct ExprTimestampValue from \"2015-01-01 12:10\" failed, " + + "unsupported date format.", + exception.getMessage()); + } + + @Test + public void constructArray() { + assertEquals( + new ExprCollectionValue(ImmutableList.of(new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), + tupleValue("{\"arrayV\":[{\"info\":\"zz\",\"author\":\"au\"}]}").get("arrayV")); + } + + @Test + public void constructStruct() { + assertEquals( + new ExprTupleValue( + new LinkedHashMap() { + { + put("id", integerValue(1)); + put("state", stringValue("WA")); + } + }), + tupleValue("{\"structV\":{\"id\":1,\"state\":\"WA\"}}").get("structV")); + } + + @Test + public void constructFromInvalidJsonThrowException() { + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> tupleValue("{\"invalid_json:1}")); + assertEquals("invalid json: 
{\"invalid_json:1}.", exception.getMessage()); + } + + @Test + public void noTypeFoundForMappingThrowException() { + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> tupleValue("{\"not_exist\":1}")); + assertEquals("No type found for field: not_exist.", exception.getMessage()); + } + + @Test + public void constructUnsupportedTypeThrowException() { + ElasticsearchExprValueFactory exprValueFactory = + new ElasticsearchExprValueFactory(ImmutableMap.of("type", new TestType())); + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> exprValueFactory.construct("{\"type\":1}")); + assertEquals("Unsupported type: TEST_TYPE for field: type, value: 1.", exception.getMessage()); + } + + public Map tupleValue(String jsonString) { + return (Map) exprValueFactory.construct(jsonString).value(); + } + + @EqualsAndHashCode + @ToString + private static class TestType implements ExprType { + + @Override + public String typeName() { + return "TEST_TYPE"; + } + } +} diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java index 433d7592c1..c5dcf97b55 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java @@ -28,6 +28,7 @@ import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.protector.ElasticsearchExecutionProtector; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.executor.protector.ResourceMonitorPlan; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage.ElasticsearchIndexScan; @@ -58,6 +59,9 @@ class ElasticsearchExecutionProtectorTest { @Mock private ResourceMonitor resourceMonitor; + @Mock + private ElasticsearchExprValueFactory exprValueFactory; + private ElasticsearchExecutionProtector executionProtector; @BeforeEach @@ -71,7 +75,7 @@ public void testProtectIndexScan() { ReferenceExpression include = ref("age", INTEGER); ReferenceExpression exclude = ref("name", STRING); ReferenceExpression dedupeField = ref("name", STRING); - Expression filterExpr = literal(ExprBooleanValue.ofTrue()); + Expression filterExpr = literal(ExprBooleanValue.of(true)); List groupByExprs = Arrays.asList(ref("age", INTEGER)); List aggregators = Arrays.asList(new AvgAggregator(groupByExprs, DOUBLE)); Map mappings = @@ -91,8 +95,9 @@ public void testProtectIndexScan() { PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( filter( - resourceMonitor(new ElasticsearchIndexScan(client, - indexName)), + resourceMonitor( + new ElasticsearchIndexScan( + client, indexName, exprValueFactory)), filterExpr), aggregators, groupByExprs), @@ -112,7 +117,8 @@ public void testProtectIndexScan() { PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( filter( - new ElasticsearchIndexScan(client, indexName), + new ElasticsearchIndexScan( + client, indexName, exprValueFactory), filterExpr), aggregators, groupByExprs), @@ -122,13 +128,12 @@ public void testProtectIndexScan() { sortCount, sortField), dedupeField), - include)) - ); + include))); } @Test public void testWithoutProtection() { - Expression filterExpr = literal(ExprBooleanValue.ofTrue()); + Expression filterExpr = literal(ExprBooleanValue.of(true)); assertEquals( filter( diff --git 
a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java index 12e8caebd3..2ace5a4cc7 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexScanTest.java @@ -16,6 +16,7 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.storage; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -28,7 +29,9 @@ import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.ElasticsearchResponse; +import com.google.common.collect.ImmutableMap; import java.util.Arrays; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.search.SearchHit; @@ -42,12 +45,17 @@ @ExtendWith(MockitoExtension.class) class ElasticsearchIndexScanTest { - @Mock private ElasticsearchClient client; + @Mock + private ElasticsearchClient client; + + private ElasticsearchExprValueFactory exprValueFactory = new ElasticsearchExprValueFactory( + ImmutableMap.of("name", STRING, "department", STRING)); @Test void queryEmptyResult() { mockResponse(); - try (ElasticsearchIndexScan indexScan = new ElasticsearchIndexScan(client, "test")) 
{ + try (ElasticsearchIndexScan indexScan = + new ElasticsearchIndexScan(client, "test", exprValueFactory)) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -57,10 +65,11 @@ void queryEmptyResult() { @Test void queryAllResults() { mockResponse( - new SearchHit[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new SearchHit[] {employee(3, "Allen", "IT")}); + new SearchHit[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new SearchHit[]{employee(3, "Allen", "IT")}); - try (ElasticsearchIndexScan indexScan = new ElasticsearchIndexScan(client, "employees")) { + try (ElasticsearchIndexScan indexScan = + new ElasticsearchIndexScan(client, "employees", exprValueFactory)) { indexScan.open(); assertTrue(indexScan.hasNext()); diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java index 63ebd28a1c..458c2f2ecd 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/storage/ElasticsearchIndexTest.java @@ -37,11 +37,12 @@ import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort; -import com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort.SortOption; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.client.ElasticsearchClient; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.type.ElasticsearchDataType; +import 
com.amazon.opendistroforelasticsearch.sql.elasticsearch.data.value.ElasticsearchExprValueFactory; import com.amazon.opendistroforelasticsearch.sql.elasticsearch.mapping.IndexMapping; import com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; @@ -65,7 +66,11 @@ @ExtendWith(MockitoExtension.class) class ElasticsearchIndexTest { - @Mock private ElasticsearchClient client; + @Mock + private ElasticsearchClient client; + + @Mock + private ElasticsearchExprValueFactory exprValueFactory; @Test void getFieldTypes() { @@ -93,16 +98,16 @@ void getFieldTypes() { fieldTypes, allOf( aMapWithSize(10), - hasEntry("name", ExprCoreType.STRING), - hasEntry("address", ExprCoreType.STRING), - hasEntry("age", ExprCoreType.INTEGER), + hasEntry("name", (ExprType) ExprCoreType.STRING), + hasEntry("address", (ExprType) ElasticsearchDataType.ES_TEXT), + hasEntry("age", (ExprType) ExprCoreType.INTEGER), hasEntry("account_number", ExprCoreType.LONG), - hasEntry("balance1", ExprCoreType.FLOAT), - hasEntry("balance2", ExprCoreType.DOUBLE), - hasEntry("gender", ExprCoreType.BOOLEAN), - hasEntry("family", ExprCoreType.ARRAY), - hasEntry("employer", ExprCoreType.STRUCT), - hasEntry("birthday", ExprCoreType.UNKNOWN))); + hasEntry("balance1", (ExprType) ExprCoreType.FLOAT), + hasEntry("balance2", (ExprType) ExprCoreType.DOUBLE), + hasEntry("gender", (ExprType) ExprCoreType.BOOLEAN), + hasEntry("family", (ExprType) ExprCoreType.ARRAY), + hasEntry("employer", (ExprType) ExprCoreType.STRUCT), + hasEntry("birthday", (ExprType) ExprCoreType.TIMESTAMP))); } @Test @@ -110,7 +115,8 @@ void implementRelationOperatorOnly() { String indexName = "test"; LogicalPlan plan = relation(indexName); Table index = new ElasticsearchIndex(client, indexName); - assertEquals(new ElasticsearchIndexScan(client, indexName), index.implement(plan)); + assertEquals( + new ElasticsearchIndexScan(client, indexName, exprValueFactory), 
index.implement(plan)); } @Test @@ -119,7 +125,7 @@ void implementOtherLogicalOperators() { ReferenceExpression include = ref("age", INTEGER); ReferenceExpression exclude = ref("name", STRING); ReferenceExpression dedupeField = ref("name", STRING); - Expression filterExpr = literal(ExprBooleanValue.ofTrue()); + Expression filterExpr = literal(ExprBooleanValue.of(true)); List groupByExprs = Arrays.asList(ref("age", INTEGER)); List aggregators = Arrays.asList(new AvgAggregator(groupByExprs, DOUBLE)); Map mappings = @@ -158,10 +164,12 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( - PhysicalPlanDSL.filter( - new ElasticsearchIndexScan(client, indexName), filterExpr), - aggregators, - groupByExprs), + PhysicalPlanDSL.filter( + new ElasticsearchIndexScan( + client, indexName, exprValueFactory), + filterExpr), + aggregators, + groupByExprs), mappings), exclude), newEvalField), diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 3719344ff0..472eda4b82 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -23,6 +23,8 @@ repositories { configurations.all { exclude group: "commons-logging", module: "commons-logging" + // enforce 1.1.3, https://www.whitesourcesoftware.com/vulnerability-database/WS-2019-0379 + resolutionStrategy.force 'commons-codec:commons-codec:1.13' } dependencies { @@ -56,12 +58,13 @@ compileTestJava { } } -tasks.integTest.dependsOn(':plugin:bundlePlugin') +tasks.integTest.dependsOn(':plugin:bundlePlugin', ':integ-test:integTestWithNewEngine') testClusters.integTest { testDistribution = 'oss' plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile) } +// Run only legacy SQL ITs with new SQL engine disabled integTest.runner { systemProperty 'tests.security.manager', 'false' systemProperty('project.root', project.projectDir.absolutePath) @@ -78,12 +81,44 @@ integTest.runner { jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' 
} - include 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class' - include 'com/amazon/opendistroforelasticsearch/sql/legacy/**/*IT.class' + exclude 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class' + exclude 'com/amazon/opendistroforelasticsearch/sql/sql/**/*IT.class' exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class' exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**' } +// Run PPL ITs and new, legacy and comparison SQL ITs with new SQL engine enabled +task integTestWithNewEngine(type: RestIntegTestTask) { + dependsOn ':plugin:bundlePlugin' + runner { + systemProperty 'tests.security.manager', 'false' + systemProperty('project.root', project.projectDir.absolutePath) + + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + // Enable new SQL engine + systemProperty 'enableNewEngine', 'true' + + // Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for + // requests. The 'doFirst' delays reading the debug setting on the cluster till execution time. 
+ doFirst { systemProperty 'cluster.debug', getDebug() } + + if (System.getProperty("test.debug") != null) { + jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' + } + + exclude 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class' + exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**' + } +} + +testClusters.integTestWithNewEngine { + testDistribution = 'oss' + plugin file(tasks.getByPath(':plugin:bundlePlugin').archiveFile) +} + task docTest(type: RestIntegTestTask) { dependsOn ':plugin:bundlePlugin' @@ -102,6 +137,7 @@ task docTest(type: RestIntegTestTask) { include 'com/amazon/opendistroforelasticsearch/sql/doctest/**/*IT.class' exclude 'com/amazon/opendistroforelasticsearch/sql/correctness/**/*IT.class' exclude 'com/amazon/opendistroforelasticsearch/sql/ppl/**/*IT.class' + exclude 'com/amazon/opendistroforelasticsearch/sql/sql/**/*IT.class' exclude 'com/amazon/opendistroforelasticsearch/sql/legacy/**/*IT.class' } } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/report/FailedTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/report/FailedTestCase.java index d6a209bde7..15ae2c4f76 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/report/FailedTestCase.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/report/FailedTestCase.java @@ -43,10 +43,17 @@ public class FailedTestCase extends TestCaseReport { */ private final String explain; - public FailedTestCase(int id, String sql, List resultSets) { + /** + * Errors occurred for partial other databases. 
+ */ + private final String errors; + + + public FailedTestCase(int id, String sql, List resultSets, String errors) { super(id, sql, FAILURE); this.resultSets = resultSets; this.resultSets.sort(Comparator.comparing(DBResult::getDatabaseName)); + this.errors = errors; // Generate explanation by diff the first result with remaining this.explain = resultSets.subList(1, resultSets.size()) diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java index 1470ed48bb..333d864b06 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/runner/ComparisonTest.java @@ -140,18 +140,16 @@ private TestCaseReport compareWithOtherDb(String sql, DBResult esResult) { mismatchResults.add(otherDbResult); - // Cannot find any database result match - if (i == otherDbConnections.length - 1) { - return new FailedTestCase(nextId(), sql, mismatchResults); - } } catch (Exception e) { // Ignore and move on to next database reasons.append(extractRootCause(e)).append(";"); } } - // Cannot find any database support this query - return new ErrorTestCase(nextId(), sql, "No other databases support this query: " + reasons); + if (mismatchResults.size() == 1) { // Only ES result on list. 
Cannot find other database support this query + return new ErrorTestCase(nextId(), sql, "No other databases support this query: " + reasons); + } + return new FailedTestCase(nextId(), sql, mismatchResults, reasons.toString()); } private int nextId() { diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/ComparisonTestTest.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/ComparisonTestTest.java index 3a6e48278e..64276103aa 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/ComparisonTestTest.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/ComparisonTestTest.java @@ -31,6 +31,7 @@ import com.amazon.opendistroforelasticsearch.sql.correctness.runner.resultset.Row; import com.amazon.opendistroforelasticsearch.sql.correctness.runner.resultset.Type; import com.amazon.opendistroforelasticsearch.sql.correctness.testset.TestQuerySet; +import java.util.Collections; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -87,7 +88,7 @@ public void testFailureDueToInconsistency() { TestReport expected = new TestReport(); expected.addTestCase( - new FailedTestCase(1, "SELECT * FROM accounts", asList(esResult, otherDbResult))); + new FailedTestCase(1, "SELECT * FROM accounts", asList(esResult, otherDbResult), "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -137,7 +138,7 @@ public void testFailureDueToEventualInconsistency() { TestReport expected = new TestReport(); expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(esResult, otherDbResult, anotherDbResult))); + asList(esResult, otherDbResult, anotherDbResult), "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -192,6 +193,31 @@ public void 
testSuccessWhenOneDBSupportThisQuery() { assertEquals(expected, actual); } + @Test + public void testFailureDueToInconsistencyAndExceptionMixed() { + DBConnection otherDBConnection2 = mock(DBConnection.class); + when(otherDBConnection2.getDatabaseName()).thenReturn("ZZZ DB"); + correctnessTest = new ComparisonTest( + esConnection, new DBConnection[] {otherDbConnection, otherDBConnection2} + ); + + DBResult esResult = + new DBResult("ES", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherResult = + new DBResult("Other", asList(new Type("firstname", "text")), Collections.emptyList()); + + when(esConnection.select(anyString())).thenReturn(esResult); + when(otherDbConnection.select(anyString())).thenReturn(otherResult); + when(otherDBConnection2.select(anyString())) + .thenThrow(new RuntimeException("Unsupported feature")); + + TestReport expected = new TestReport(); + expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", + asList(esResult, otherResult), "Unsupported feature;")); + TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); + assertEquals(expected, actual); + } + private TestQuerySet querySet(String query) { return new TestQuerySet(new String[] {query}); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestReportTest.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestReportTest.java index fa77a98a2b..52f99235cd 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestReportTest.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/correctness/tests/TestReportTest.java @@ -68,8 +68,9 @@ public void testFailedReport() { new DBResult("Elasticsearch", singleton(new Type("firstName", "text")), singleton(new Row(asList("hello")))), new DBResult("H2", singleton(new Type("firstName", "text")), - singleton(new 
Row(asList("world")))) - ))); + singleton(new Row(asList("world"))))), + "[SQLITE_ERROR] SQL error or missing database;" + )); JSONObject actual = new JSONObject(report); JSONObject expected = new JSONObject( "{" + @@ -84,6 +85,7 @@ public void testFailedReport() { " \"result\": 'Failed'," + " \"sql\": \"SELECT * FROM accounts\"," + " \"explain\": \"Data row at [0] is different: this=[Row(values=[hello])], other=[Row(values=[world])]\"," + + " \"errors\": \"[SQLITE_ERROR] SQL error or missing database;\"," + " \"resultSets\": [" + " {" + " \"database\": \"Elasticsearch\"," + diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/DateFormatIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/DateFormatIT.java index f4f695dbb5..46bff948dd 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/DateFormatIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/DateFormatIT.java @@ -16,6 +16,10 @@ package com.amazon.opendistroforelasticsearch.sql.legacy; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; @@ -116,6 +120,17 @@ public void and() throws SqlParseException { ); } + @Test + public void andWithDefaultTimeZone() throws SqlParseException { + assertThat( + dateQuery(SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", + "yyyy-MM-dd HH:mm:ss"), + contains("2014-08-17 16:13:12") + ); + } + @Test public void or() throws SqlParseException { 
assertThat( @@ -157,6 +172,17 @@ public void sortByAliasedDateFormat() throws IOException { is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } + @Test + public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { + JSONObject response = executeJdbcRequest("SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + + " FROM " + TestsConstants.TEST_INDEX_ONLINE + + " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); + + verifySchema(response, schema("date", "", "text")); + verifyDataRows(response, rows("2014-08-17")); + } + @Test public void groupByAndSort() throws IOException { JSONObject aggregations = executeQuery( @@ -209,17 +235,19 @@ private void checkAggregations(JSONObject aggregations, String key, } private Set dateQuery(String sql) throws SqlParseException { + return dateQuery(sql, TestsConstants.SIMPLE_DATE_FORMAT); + } + + private Set dateQuery(String sql, String format) throws SqlParseException { try { JSONObject response = executeQuery(sql); - return getResult(response, "insert_time"); + return getResult(response, "insert_time", DateTimeFormat.forPattern(format)); } catch (IOException e) { throw new SqlParseException(String.format("Unable to process query '%s'", sql)); } } - private Set getResult(JSONObject response, String fieldName) { - DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.SIMPLE_DATE_FORMAT); - + private Set getResult(JSONObject response, String fieldName, DateTimeFormatter formatter) { JSONArray hits = getHits(response); Set result = new TreeSet<>(); // Using TreeSet so order is maintained for (int i = 0; i < hits.length(); i++) { @@ -242,4 +270,8 @@ public static String getScriptAggregationKey(JSONObject aggregation, String pref .orElseThrow(() -> new RuntimeException( "Can't find key" + prefix + " in aggregation " + aggregation)); } + + private JSONObject 
executeJdbcRequest(String query) { + return new JSONObject(executeQuery(query, "jdbc")); + } } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java new file mode 100644 index 0000000000..a437179f98 --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java @@ -0,0 +1,127 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.legacy; + +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_DEEP_NESTED; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; + +import com.amazon.opendistroforelasticsearch.sql.legacy.utils.StringUtils; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Test; + +/** + * Integration test for Elasticsearch object field (and nested field). + * This class is focused on simple SELECT-FROM query to ensure right column + * number and value is returned. 
+ */ +public class ObjectFieldSelectIT extends SQLIntegTestCase { + + @Override + protected void init() throws Exception { + loadIndex(Index.DEEP_NESTED); + } + + @Test + public void testSelectObjectFieldItself() { + JSONObject response = new JSONObject(query("SELECT city FROM %s")); + + verifySchema(response, schema("city", null, "object")); + + // Expect object field itself is returned in a single cell + verifyDataRows(response, + rows(new JSONObject( + "{\n" + + " \"name\": \"Seattle\",\n" + + " \"location\": {\"latitude\": 10.5}\n" + + "}") + ) + ); + } + + @Test + public void testSelectObjectInnerFields() { + JSONObject response = new JSONObject(query( + "SELECT city.location, city.location.latitude FROM %s")); + + verifySchema(response, + schema("city.location", null, "object"), + schema("city.location.latitude", null, "double") + ); + + // Expect inner regular or object field returned in its single cell + verifyDataRows(response, + rows( + new JSONObject("{\"latitude\": 10.5}"), + 10.5 + ) + ); + } + + @Test + public void testSelectNestedFieldItself() { + JSONObject response = new JSONObject(query("SELECT projects FROM %s")); + + // Nested field is absent in ES Get Field Mapping response either hence "object" used + verifySchema(response, schema("projects", null, "object")); + + // Expect nested field itself is returned in a single cell + verifyDataRows(response, + rows(new JSONArray( + "[\n" + + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" + + " {\"name\": \"AWS Redshift security\"},\n" + + " {\"name\": \"AWS Aurora security\"}\n" + + "]") + ) + ); + } + + @Test + public void testSelectObjectFieldOfArrayValuesItself() { + JSONObject response = new JSONObject(query("SELECT accounts FROM %s")); + + // Expect the entire list of values is returned just like a nested field + verifyDataRows(response, + rows(new JSONArray( + "[\n" + + " {\"id\": 1},\n" + + " {\"id\": 2}\n" + + "]") + ) + ); + } + + @Test + public void 
testSelectObjectFieldOfArrayValuesInnerFields() { + JSONObject response = new JSONObject(query("SELECT accounts.id FROM %s")); + + // We don't support flatten object field of list value so expect null returned + verifyDataRows(response, rows(JSONObject.NULL)); + } + + private String query(String sql) { + return executeQuery( + StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), + "jdbc" + ); + } + +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java index f4a4c36a16..bdf5e09435 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/QueryIT.java @@ -90,6 +90,11 @@ protected void init() throws Exception { loadIndex(Index.BANK_WITH_NULL_VALUES); } + @Test + public void queryEndWithSemiColonTest() { + executeQuery(StringUtils.format("SELECT * FROM %s;", TEST_INDEX_BANK), "jdbc"); + } + @Test public void searchTypeTest() throws IOException { JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 1000", diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java index e1c44a91aa..08292df717 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java @@ -64,6 +64,7 @@ public class SQLFunctionsIT extends SQLIntegTestCase { @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); + loadIndex(Index.BANK); loadIndex(Index.ONLINE); loadIndex(Index.DATE); } @@ -369,6 +370,54 @@ public void castFieldToDatetimeWithGroupByJdbcFormatTest() { rows("2019-09-25T02:04:13.469Z")); } + @Test + 
public void castBoolFieldToNumericValueInSelectClause() { + JSONObject response = + executeJdbcRequest( + "SELECT " + + " male, " + + " CAST(male AS INT) AS cast_int, " + + " CAST(male AS LONG) AS cast_long, " + + " CAST(male AS FLOAT) AS cast_float, " + + " CAST(male AS DOUBLE) AS cast_double " + + "FROM " + TestsConstants.TEST_INDEX_BANK + " " + + "WHERE account_number = 1 OR account_number = 13" + ); + + verifySchema(response, + schema("male", "boolean"), + schema("cast_int", "integer"), + schema("cast_long", "long"), + schema("cast_float", "float"), + schema("cast_double", "double") + ); + verifyDataRows(response, + rows(true, 1, 1, 1, 1), + rows(false, 0, 0, 0, 0) + ); + } + + @Test + public void castBoolFieldToNumericValueWithGroupByAlias() { + JSONObject response = + executeJdbcRequest( + "SELECT " + + "CAST(male AS INT) AS cast_int, " + + "COUNT(*) " + + "FROM " + TestsConstants.TEST_INDEX_BANK + " " + + "GROUP BY cast_int" + ); + + verifySchema(response, + schema("cast_int", "cast_int", "double"), //Type is double due to query plan fail to infer + schema("COUNT(*)", "integer") + ); + verifyDataRows(response, + rows("0", 3), + rows("1", 4) + ); + } + @Test public void castStatementInWhereClauseGreaterThanTest() { JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java index 3981aa99d0..aac3497165 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java @@ -22,6 +22,7 @@ import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getBankWithNullValuesIndexMapping; import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getDateIndexMapping; import 
static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getDateTimeIndexMapping; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getDeepNestedIndexMapping; import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getDogIndexMapping; import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getDogs2IndexMapping; import static com.amazon.opendistroforelasticsearch.sql.legacy.TestUtils.getDogs3IndexMapping; @@ -81,6 +82,7 @@ public void setUpIndices() throws Exception { } increaseScriptMaxCompilationsRate(); + enableNewQueryEngine(); init(); } @@ -148,6 +150,13 @@ private void increaseScriptMaxCompilationsRate() throws IOException { new ClusterSetting("transient", "script.max_compilations_rate", "10000/1m")); } + private void enableNewQueryEngine() throws IOException { + boolean isEnabled = Boolean.parseBoolean(System.getProperty("enableNewEngine", "false")); + if (isEnabled) { + com.amazon.opendistroforelasticsearch.sql.util.TestUtils.enableNewQueryEngine(client()); + } + } + protected static void wipeAllClusterSettings() throws IOException { updateClusterSettings(new ClusterSetting("persistent", "*", null)); updateClusterSettings(new ClusterSetting("transient", "*", null)); @@ -511,7 +520,11 @@ public enum Index { NESTED_SIMPLE(TestsConstants.TEST_INDEX_NESTED_SIMPLE, "_doc", getNestedSimpleIndexMapping(), - "src/test/resources/nested_simple.json"); + "src/test/resources/nested_simple.json"), + DEEP_NESTED(TestsConstants.TEST_INDEX_DEEP_NESTED, + "_doc", + getDeepNestedIndexMapping(), + "src/test/resources/deep_nested_index_data.json"); private final String name; private final String type; diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestUtils.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestUtils.java index a44715fb89..1c2d3f5d27 100644 --- 
a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestUtils.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestUtils.java @@ -233,6 +233,11 @@ public static String getNestedSimpleIndexMapping() { return getMappingFile(mappingFile); } + public static String getDeepNestedIndexMapping() { + String mappingFile = "deep_nested_index_mapping.json"; + return getMappingFile(mappingFile); + } + public static void loadBulk(Client client, String jsonPath, String defaultIndex) throws Exception { System.out.println(String.format("Loading file %s into elasticsearch cluster", jsonPath)); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestsConstants.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestsConstants.java index 30f07d73b5..0eace4b65d 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestsConstants.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/TestsConstants.java @@ -54,6 +54,7 @@ public class TestsConstants { public final static String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; public final static String TEST_INDEX_DATE = TEST_INDEX + "_date"; public final static String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; + public final static String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested"; public final static String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/FieldsCommandIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/FieldsCommandIT.java index e4c5a0f4bd..f44bfbd19f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/FieldsCommandIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/FieldsCommandIT.java @@ -22,6 +22,7 @@ import java.io.IOException; import static 
com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.columnName; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.columnPattern; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyColumn; @@ -31,6 +32,7 @@ public class FieldsCommandIT extends PPLIntegTestCase { @Override public void init() throws IOException { loadIndex(Index.ACCOUNT); + loadIndex(Index.BANK); } @Test @@ -54,4 +56,30 @@ public void testFieldsWildCard() throws IOException { executeQuery(String.format("source=%s | fields ", TEST_INDEX_ACCOUNT) + "firstnam%"); verifyColumn(result, columnPattern("^firstnam.*")); } + + @Test + public void testSelectDateTypeField() throws IOException { + String result = + executeQueryToString( + String.format("source=%s | fields birthdate", TEST_INDEX_BANK)); + assertEquals( + "{\n" + + " \"schema\": [{\n" + + " \"name\": \"birthdate\",\n" + + " \"type\": \"timestamp\"\n" + + " }],\n" + + " \"total\": 7,\n" + + " \"datarows\": [\n" + + " [\"2017-10-23 00:00:00\"],\n" + + " [\"2017-11-20 00:00:00\"],\n" + + " [\"2018-06-23 00:00:00\"],\n" + + " [\"2018-11-13 23:33:20\"],\n" + + " [\"2018-06-27 00:00:00\"],\n" + + " [\"2018-08-19 00:00:00\"],\n" + + " [\"2018-08-11 00:00:00\"]\n" + + " ],\n" + + " \"size\": 7\n" + + "}\n", + result); + } } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/MathematicalFunctionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/MathematicalFunctionIT.java new file mode 100644 index 0000000000..dfd1b1df42 --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/MathematicalFunctionIT.java @@ -0,0 +1,416 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.sql.ppl; + +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.closeTo; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySome; + +import java.io.IOException; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; + +public class MathematicalFunctionIT extends PPLIntegTestCase { + + @Override + public void init() throws IOException { + loadIndex(Index.BANK); + loadIndex(Index.BANK_WITH_NULL_VALUES); + } + + @Test + public void testAbs() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "integer")); + verifyDataRows( + result, + rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + } + + @Test + public void testCeil() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); + 
verifySchema(result, schema("f", null, "integer")); + verifyDataRows( + result, + rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + } + + @Test + public void testCeiling() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "integer")); + verifyDataRows( + result, + rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + } + + @Test + public void testE() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, rows(Math.E), rows(Math.E), rows(Math.E), rows(Math.E), + rows(Math.E), rows(Math.E), rows(Math.E)); + } + + @Test + public void testExp() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, rows(Math.exp(32)), rows(Math.exp(36)), rows(Math.exp(28)), rows(Math.exp(33)), + rows(Math.exp(36)), rows(Math.exp(39)), rows(Math.exp(34))); + } + + @Test + public void testFloor() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = floor(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "integer")); + verifyDataRows( + result, + rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + } + + @Test + public void testLn() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, rows(Math.log(32)), rows(Math.log(36)), rows(Math.log(28)), rows(Math.log(33)), + rows(Math.log(36)), rows(Math.log(39)), rows(Math.log(34))); + } + + 
@Test + public void testLogOneArg() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows(result, + rows(Math.log(28)), rows(Math.log(32)), rows(Math.log(33)), rows(Math.log(34)), + rows(Math.log(36)), rows(Math.log(36)), rows(Math.log(39)) + ); + } + + @Test + public void testLogTwoArgs() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, closeTo(Math.log(39225) / Math.log(32)), closeTo(Math.log(5686) / Math.log(36)), + closeTo(Math.log(32838) / Math.log(28)), closeTo(Math.log(4180) / Math.log(33)), + closeTo(Math.log(16418) / Math.log(36)), closeTo(Math.log(40540) / Math.log(39)), + closeTo(Math.log(48086) / Math.log(34))); + } + + @Test + public void testLog10() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, rows(Math.log10(32)), rows(Math.log10(36)), rows(Math.log10(28)), + rows(Math.log10(33)), rows(Math.log10(36)), rows(Math.log10(39)), rows(Math.log10(34))); + } + + @Test + public void testLog2() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, + closeTo(Math.log(32) / Math.log(2)), closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(28) / Math.log(2)), closeTo(Math.log(33) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), closeTo(Math.log(39) / Math.log(2)), + closeTo(Math.log(34) / Math.log(2))); + } + + @Test + public void testConv() throws IOException { + JSONObject 
result = + executeQuery( + String.format( + "source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "string")); + verifyDataRows( + result, rows("20"), rows("24"), rows("1c"), rows("21"), + rows("24"), rows("27"), rows("22")); + } + + @Test + public void testCrc32() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "long")); + verifyDataRows( + result, rows(324249283), rows(3369714977L), rows(1165568529), rows(2293694493L), + rows(3936131563L), rows(256963594), rows(824319315)); + } + + @Test + public void testMod() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "integer")); + verifyDataRows( + result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); + } + + @Test + public void testPow() throws IOException { + JSONObject pow = + executeQuery( + String.format( + "source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); + verifySchema(pow, schema("f", null, "double")); + verifyDataRows( + pow, rows(1024), rows(1296), rows(784), rows(1089), rows(1296), rows(1521), rows(1156)); + + JSONObject power = + executeQuery( + String.format( + "source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); + verifySchema(power, schema("f", null, "double")); + verifyDataRows( + power, rows(1024), rows(1296), rows(784), rows(1089), rows(1296), rows(1521), rows(1156)); + + } + + @Test + public void testRound() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "long")); + verifyDataRows(result, + rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + + result = + executeQuery( 
+ String.format( + "source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "long")); + verifyDataRows(result, + rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); + } + + @Test + public void testSign() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = sign(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "integer")); + verifyDataRows( + result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); + } + + @Test + public void testSqrt() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows(result, + rows(5.656854249492381), rows(6), rows(5.291502622129181), + rows(5.744562646538029), rows(6), rows(6.244997998398398), + rows(5.830951894845301)); + } + + @Test + public void testTruncate() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "long")); + verifyDataRows(result, + rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + + result = + executeQuery( + String.format( + "source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "long")); + verifyDataRows(result, + rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); + } + + @Test + public void testPi() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifyDataRows( + result, rows(Math.PI), rows(Math.PI), rows(Math.PI), rows(Math.PI), + rows(Math.PI), rows(Math.PI), rows(Math.PI)); + } + + @Test + public void testRand() throws 
IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = rand() | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "float")); + + result = + executeQuery( + String.format( + "source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "float")); + } + + @Test + public void testAcos() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.acos(0))); + } + + @Test + public void testAsin() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.asin(1))); + } + + @Test + public void testAtan() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.atan(2))); + + result = + executeQuery( + String.format( + "source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); + } + + @Test + public void testAtan2() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); + } + + @Test + public void testCos() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); + verifySchema(result, 
schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.cos(1.57))); + } + + @Test + public void testCot() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), closeTo(1 / Math.tan(2))); + } + + @Test + public void testDegrees() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.toDegrees(1.57))); + } + + @Test + public void testRadians() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = radians(90) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.toRadians(90))); + } + + @Test + public void testSin() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); + verifySchema(result, schema("f", null, "double")); + verifySome(result.getJSONArray("datarows"), rows(Math.sin(1.57))); + } +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLIntegTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/CorrectnessTestBase.java similarity index 98% rename from integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLIntegTestCase.java rename to integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/CorrectnessTestBase.java index faef8f713f..d31f297728 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLIntegTestCase.java +++ 
b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/CorrectnessTestBase.java @@ -39,7 +39,7 @@ * enforce the success of all tests rather than report failures only. */ @ThreadLeakScope(ThreadLeakScope.Scope.NONE) -public abstract class SQLIntegTestCase extends RestIntegTestCase { +public abstract class CorrectnessTestBase extends RestIntegTestCase { /** * Comparison test runner shared by all methods in this IT class. diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java index 46a0a702a4..754a274f50 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -38,6 +39,7 @@ * and function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, * this kind of manual written IT class will be focused on anomaly case test. */ +@Ignore public class ExpressionIT extends RestIntegTestCase { @Rule @@ -49,14 +51,6 @@ protected void init() throws Exception { TestUtils.enableNewQueryEngine(client()); } - @Test - public void testDivideZeroExpression() throws Exception { - expectResponseException().hasStatusCode(500) //TODO: should be client error code 400? 
- .containsMessage("\"reason\": \"/ by zero\"") - .containsMessage("\"type\": \"ArithmeticException\"") - .whenExecute("SELECT 5 / (1 - 1)"); - } - public ResponseExceptionAssertion expectResponseException() { return new ResponseExceptionAssertion(exceptionRule); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java new file mode 100644 index 0000000000..21d9614e25 --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/IdentifierIT.java @@ -0,0 +1,98 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.sql; + +import static com.amazon.opendistroforelasticsearch.sql.util.TestUtils.createHiddenIndexByRestClient; +import static com.amazon.opendistroforelasticsearch.sql.util.TestUtils.performRequest; + +import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; +import java.io.IOException; +import org.elasticsearch.client.Request; +import org.junit.jupiter.api.Test; + +/** + * Integration tests for identifiers including index and field name symbol. 
+ */ +public class IdentifierIT extends SQLIntegTestCase { + + @Test + public void testIndexNames() throws IOException { + createIndexWithOneDoc("logs", "logs_2020_01"); + queryAndAssertTheDoc("SELECT * FROM logs"); + queryAndAssertTheDoc("SELECT * FROM logs_2020_01"); + } + + @Test + public void testSpecialIndexNames() throws IOException { + createIndexWithOneDoc(".system", "logs-2020-01"); + queryAndAssertTheDoc("SELECT * FROM .system"); + queryAndAssertTheDoc("SELECT * FROM logs-2020-01"); + } + + @Test + public void testQuotedIndexNames() throws IOException { + createIndexWithOneDoc("logs+2020+01", "logs.2020.01"); + queryAndAssertTheDoc("SELECT * FROM `logs+2020+01`"); + queryAndAssertTheDoc("SELECT * FROM \"logs.2020.01\""); + } + + private void createIndexWithOneDoc(String... indexNames) throws IOException { + for (String indexName : indexNames) { + new Index(indexName).addDoc("{\"age\": 30}"); + } + } + + private void queryAndAssertTheDoc(String sql) { + assertEquals( + "{\n" + + " \"schema\": [{\n" + + " \"name\": \"age\",\n" + + " \"type\": \"long\"\n" + + " }],\n" + + " \"total\": 1,\n" + + " \"datarows\": [[30]],\n" + + " \"size\": 1\n" + + "}\n", + executeQuery(sql.replace("\"", "\\\""), "jdbc") + ); + } + + /** + * Index abstraction for test code readability. 
+ */ + private static class Index { + + private final String indexName; + + Index(String indexName) throws IOException { + this.indexName = indexName; + + if (indexName.startsWith(".")) { + createHiddenIndexByRestClient(client(), indexName, ""); + } else { + executeRequest(new Request("PUT", "/" + indexName)); + } + } + + void addDoc(String doc) { + Request indexDoc = new Request("POST", String.format("/%s/_doc?refresh=true", indexName)); + indexDoc.setJsonEntity(doc); + performRequest(client(), indexDoc); + } + } + +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java new file mode 100644 index 0000000000..5bfadb3c6f --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java @@ -0,0 +1,157 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.sql.sql; + +import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; +import static com.amazon.opendistroforelasticsearch.sql.util.TestUtils.getResponseBody; + +import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; +import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; +import java.io.IOException; +import java.util.Locale; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; + +public class MathematicalFunctionIT extends SQLIntegTestCase { + + @Override + public void init() throws Exception { + super.init(); + TestUtils.enableNewQueryEngine(client()); + } + + @Test + public void testConv() throws IOException { + JSONObject result = executeQuery("select conv(11, 10, 16)"); + verifySchema(result, schema("conv(11, 10, 16)", null, "string")); + verifyDataRows(result, rows("b")); + + result = executeQuery("select conv(11, 16, 10)"); + verifySchema(result, schema("conv(11, 16, 10)", null, "string")); + verifyDataRows(result, rows("17")); + } + + @Test + public void testCrc32() throws IOException { + JSONObject result = executeQuery("select crc32('MySQL')"); + verifySchema(result, schema("crc32(\"MySQL\")", null, "long")); + verifyDataRows(result, rows(3259397556L)); + } + + @Test + public void testE() throws IOException { + JSONObject result = executeQuery("select e()"); + verifySchema(result, schema("e()", null, "double")); + verifyDataRows(result, 
rows(Math.E)); + } + + @Test + public void testMod() throws IOException { + JSONObject result = executeQuery("select mod(3, 2)"); + verifySchema(result, schema("mod(3, 2)", null, "integer")); + verifyDataRows(result, rows(1)); + + result = executeQuery("select mod(3.1, 2)"); + verifySchema(result, schema("mod(3.1, 2)", null, "double")); + verifyDataRows(result, rows(1.1)); + } + + @Test + public void testRound() throws IOException { + JSONObject result = executeQuery("select round(56.78)"); + verifySchema(result, schema("round(56.78)", null, "double")); + verifyDataRows(result, rows(57)); + + result = executeQuery("select round(56.78, 1)"); + verifySchema(result, schema("round(56.78, 1)", null, "double")); + verifyDataRows(result, rows(56.8)); + + result = executeQuery("select round(56.78, -1)"); + verifySchema(result, schema("round(56.78, -1)", null, "double")); + verifyDataRows(result, rows(60)); + + result = executeQuery("select round(-56)"); + verifySchema(result, schema("round(-56)", null, "long")); + verifyDataRows(result, rows(-56)); + + result = executeQuery("select round(-56, 1)"); + verifySchema(result, schema("round(-56, 1)", null, "long")); + verifyDataRows(result, rows(-56)); + + result = executeQuery("select round(-56, -1)"); + verifySchema(result, schema("round(-56, -1)", null, "long")); + verifyDataRows(result, rows(-60)); + } + + /** + * Test sign function with double value. 
+ */ + @Test + public void testSign() throws IOException { + JSONObject result = executeQuery("select sign(1.1)"); + verifySchema(result, schema("sign(1.1)", null, "integer")); + verifyDataRows(result, rows(1)); + + result = executeQuery("select sign(-1.1)"); + verifySchema(result, schema("sign(-1.1)", null, "integer")); + verifyDataRows(result, rows(-1)); + } + + @Test + public void testTruncate() throws IOException { + JSONObject result = executeQuery("select truncate(56.78, 1)"); + verifySchema(result, schema("truncate(56.78, 1)", null, "double")); + verifyDataRows(result, rows(56.7)); + + result = executeQuery("select truncate(56.78, -1)"); + verifySchema(result, schema("truncate(56.78, -1)", null, "double")); + verifyDataRows(result, rows(50)); + + result = executeQuery("select truncate(-56, 1)"); + verifySchema(result, schema("truncate(-56, 1)", null, "long")); + verifyDataRows(result, rows(-56)); + + result = executeQuery("select truncate(-56, -1)"); + verifySchema(result, schema("truncate(-56, -1)", null, "long")); + verifyDataRows(result, rows(-50)); + } + + @Test + public void testAtan() throws IOException { + JSONObject result = executeQuery("select atan(2, 3)"); + verifySchema(result, schema("atan(2, 3)", null, "double")); + verifyDataRows(result, rows(Math.atan2(2, 3))); + } + + protected JSONObject executeQuery(String query) throws IOException { + Request request = new Request("POST", QUERY_API_ENDPOINT); + request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new JSONObject(getResponseBody(response)); + } +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java 
b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java index 7ad193b985..58c10073a1 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java @@ -28,7 +28,7 @@ /** * SQL integration test automated by comparison test framework. */ -public class SQLCorrectnessIT extends SQLIntegTestCase { +public class SQLCorrectnessIT extends CorrectnessTestBase { private static final String ROOT_DIR = "correctness/"; private static final String[] EXPR_TEST_DIR = { "expressions" }; diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java index 0668b06641..50d803e020 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/MatcherUtils.java @@ -194,6 +194,11 @@ public static void verifyOrder(JSONArray array, Matcher... 
matchers) { assertThat(objects, containsInRelativeOrder(matchers)); } + public static TypeSafeMatcher schema(String expectedName, + String expectedType) { + return schema(expectedName, null, expectedType); + } + public static TypeSafeMatcher schema(String expectedName, String expectedAlias, String expectedType) { return new TypeSafeMatcher() { @@ -226,9 +231,51 @@ public void describeTo(Description description) { @Override protected boolean matchesSafely(JSONArray array) { - List actualObjects = new ArrayList<>(); - array.iterator().forEachRemaining(actualObjects::add); - return Arrays.asList(expectedObjects).equals(actualObjects); + if (array.length() != expectedObjects.length) { + return false; + } + + for (int i = 0; i < expectedObjects.length; i++) { + Object expected = expectedObjects[i]; + boolean isEqual; + + // Use similar() because JSONObject/JSONArray.equals() only check if same reference + if (expected instanceof JSONObject) { + isEqual = ((JSONObject) expected).similar(array.get(i)); + } else if (expected instanceof JSONArray) { + isEqual = ((JSONArray) expected).similar(array.get(i)); + } else { + isEqual = expected.equals(array.get(i)); + } + + if (!isEqual) { + return false; + } + } + return true; + } + }; + } + + public static TypeSafeMatcher closeTo(Number... 
values) { + final double error = 1e-10; + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONArray item) { + List expectedValues = new ArrayList<>(Arrays.asList(values)); + List actualValues = new ArrayList<>(); + item.iterator().forEachRemaining(v -> actualValues.add((Number) v)); + return actualValues.stream() + .allMatch(v -> valuesAreClose(v, expectedValues.get(actualValues.indexOf(v)))); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.join(",", Arrays.asList().toString())); + } + + private boolean valuesAreClose(Number v1, Number v2) { + return Math.abs(v1.doubleValue() - v2.doubleValue()) <= error; } }; } diff --git a/integ-test/src/test/resources/bank.json b/integ-test/src/test/resources/bank.json index bc1525144f..12f1b89816 100644 --- a/integ-test/src/test/resources/bank.json +++ b/integ-test/src/test/resources/bank.json @@ -5,7 +5,7 @@ {"index":{"_id":"13"}} {"account_number":13,"balance":32838,"firstname":"Nanette","lastname":"Bates","age":28,"gender":"F","address":"789 Madison Street","employer":"Quility","email":"nanettebates@quility.com","city":"Nogal","state":"VA", "male" : false, "birthdate" : "2018-06-23"} {"index":{"_id":"18"}} -{"account_number":18,"balance":4180,"firstname":"Dale","lastname":"Adams","age":33,"gender":"M","address":"467 Hutchinson Court","employer":"Boink","email":"daleadams@boink.com","city":"Orick","state":"MD","male" : true, "birthdate" : "1542152000"} +{"account_number":18,"balance":4180,"firstname":"Dale","lastname":"Adams","age":33,"gender":"M","address":"467 Hutchinson Court","employer":"Boink","email":"daleadams@boink.com","city":"Orick","state":"MD","male" : true, "birthdate" : 1542152000000} {"index":{"_id":"20"}} {"account_number":20,"balance":16418,"firstname":"Elinor","lastname":"Ratliff","age":36,"gender":"M","address":"282 Kings Place","employer":"Scentric","email":"elinorratliff@scentric.com","city":"Ribera","state":"WA", "male" : 
true, "birthdate" : "2018-06-27"} {"index":{"_id":"25"}} diff --git a/integ-test/src/test/resources/correctness/expressions/functions.txt b/integ-test/src/test/resources/correctness/expressions/functions.txt index 5b67c88b66..fc6120d0ce 100644 --- a/integ-test/src/test/resources/correctness/expressions/functions.txt +++ b/integ-test/src/test/resources/correctness/expressions/functions.txt @@ -4,4 +4,72 @@ abs(1) abs(-1.234) abs(0.0) abs(4.321) -abs(abs(-1.2) * -1) \ No newline at end of file +abs(abs(-1.2) * -1) +ceil(1) +ceil(-1) +ceil(0.0) +ceil(0.4999) +ceil(abs(1)) +exp(0) +exp(1) +exp(-1) +exp(exp(1) + ceil(-1)) +floor(1) +floor(-1) +floor(0.0) +floor(0.4999) +floor(abs(-1)) +log(2) +log(2.1) +log(log(2)) +log10(2) +log10(2.1) +log10(log10(2)) +pi() +power(2, 2) +power(2, -2) +power(2.1, 2) +power(2, -2.1) +power(abs(2), 2) +sign(0) +sign(-1) +sign(1) +sign(abs(1)) +sqrt(0) +sqrt(1) +sqrt(1.1) +sqrt(abs(1)) +acos(0) +acos(0.5) +acos(-0.5) +acos(1) +acos(-1) +asin(0) +asin(0.5) +asin(-0.5) +asin(1) +asin(-1) +atan(0) +atan(1) +atan(-1) +atan2(2, 1) +atan2(-2, 1) +atan2(2, -1) +atan2(-2, -1) +cos(0) +cos(1.57) +cos(-1.57) +cot(1) +cot(-1) +degrees(0) +degrees(1.57) +degrees(-1.57) +radians(0) +radians(90) +radians(-90) +sin(0) +sin(1.57) +sin(-1.57) +tan(0) +tan(1.57) +tan(-1.57) diff --git a/integ-test/src/test/resources/correctness/queries/select.txt b/integ-test/src/test/resources/correctness/queries/select.txt index 18d34e7ee9..adb7f40782 100644 --- a/integ-test/src/test/resources/correctness/queries/select.txt +++ b/integ-test/src/test/resources/correctness/queries/select.txt @@ -1,2 +1,4 @@ +SELECT 1 + 2 FROM kibana_sample_data_flights +SELECT abs(-10) FROM kibana_sample_data_flights SELECT DistanceMiles FROM kibana_sample_data_flights SELECT AvgTicketPrice, Carrier FROM kibana_sample_data_flights WHERE AvgTicketPrice <= 500 diff --git a/integ-test/src/test/resources/deep_nested_index_data.json b/integ-test/src/test/resources/deep_nested_index_data.json 
new file mode 100644 index 0000000000..5e3883d516 --- /dev/null +++ b/integ-test/src/test/resources/deep_nested_index_data.json @@ -0,0 +1,2 @@ +{"index":{"_id":"1"}} +{"city": {"name": "Seattle", "location": {"latitude": 10.5}}, "accounts": [{"id": 1}, {"id": 2}], "projects": [{"name": "AWS Redshift Spectrum querying"}, {"name": "AWS Redshift security"}, {"name": "AWS Aurora security"}] } diff --git a/integ-test/src/test/resources/doctest/templates/dql/basics.rst b/integ-test/src/test/resources/doctest/templates/dql/basics.rst index 34cd5d3d09..4c8877d20a 100644 --- a/integ-test/src/test/resources/doctest/templates/dql/basics.rst +++ b/integ-test/src/test/resources/doctest/templates/dql/basics.rst @@ -28,6 +28,8 @@ The syntax of ``SELECT`` statement is as follows:: [ORDER BY expression [IS [NOT] NULL] [ASC | DESC] [, ...]] [LIMIT [offset, ] size] +Although multiple query statements to execute in batch is not supported, ending with semicolon ``;`` is still allowed. For example, you can run ``SELECT * FROM accounts;`` without issue. This is useful to support queries generated by other tool, such as Microsoft Excel or BI tool. 
+ Fundamentals ------------ diff --git a/integ-test/src/test/resources/indexDefinitions/deep_nested_index_mapping.json b/integ-test/src/test/resources/indexDefinitions/deep_nested_index_mapping.json new file mode 100644 index 0000000000..6c4ef1f7b2 --- /dev/null +++ b/integ-test/src/test/resources/indexDefinitions/deep_nested_index_mapping.json @@ -0,0 +1,35 @@ +{ + "mappings": { + "properties": { + "city": { + "properties": { + "name": { + "type": "keyword" + }, + "location": { + "properties": { + "latitude": { + "type": "double" + } + } + } + } + }, + "account": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "projects": { + "type": "nested", + "properties": { + "name": { + "type": "keyword" + } + } + } + } + } +} \ No newline at end of file diff --git a/legacy/build.gradle b/legacy/build.gradle index 192d718cf9..5e43ac29c1 100644 --- a/legacy/build.gradle +++ b/legacy/build.gradle @@ -60,7 +60,13 @@ dependencies { compile group: 'org.locationtech.spatial4j', name: 'spatial4j', version:'0.7' compile group: "org.elasticsearch.plugin", name: 'parent-join-client', version: "${es_version}" compile group: "org.elasticsearch.plugin", name: 'reindex-client', version: "${es_version}" - compile group: 'com.google.guava', name: 'guava', version:'23.0' + constraints { + implementation('commons-codec:commons-codec:1.13') { + because 'https://www.whitesourcesoftware.com/vulnerability-database/WS-2019-0379' + } + } + // https://github.com/google/guava/wiki/CVE-2018-10237 + implementation group: 'com.google.guava', name: 'guava', version: '29.0-jre' compile group: 'org.json', name: 'json', version:'20180813' compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.10' compile group: 'org.elasticsearch', name: 'elasticsearch', version: "${es_version}" diff --git a/legacy/src/main/antlr/OpenDistroSqlParser.g4 b/legacy/src/main/antlr/OpenDistroSqlParser.g4 index ce30892e16..86c5c89d20 100644 --- a/legacy/src/main/antlr/OpenDistroSqlParser.g4 +++ 
b/legacy/src/main/antlr/OpenDistroSqlParser.g4 @@ -32,7 +32,7 @@ options { tokenVocab=OpenDistroSqlLexer; } // Root rule root - : sqlStatement? EOF + : sqlStatement? SEMI? EOF ; // Only SELECT, DELETE, SHOW and DSCRIBE are supported for now diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMapping.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMapping.java index a586fc60fd..f60fdca192 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMapping.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMapping.java @@ -16,6 +16,7 @@ package com.amazon.opendistroforelasticsearch.sql.legacy.esdomain.mapping; import com.amazon.opendistroforelasticsearch.sql.legacy.domain.Field; +import com.amazon.opendistroforelasticsearch.sql.legacy.executor.format.DescribeResultSet; import com.amazon.opendistroforelasticsearch.sql.legacy.utils.StringUtils; import java.util.Map; @@ -119,24 +120,26 @@ public String path() { } /** - * Used to retrieve the type of fields from metaData map structures for both regular and nested fields + * Find field type in ES Get Field Mapping API response. Note that Get Field Mapping API does NOT return + * the type for object or nested field. In this case, object type is used as default under the assumption + * that the field queried here must exist (which is true if semantic analyzer is enabled). 
+ * + * @return field type if found in mapping, otherwise "object" type returned */ @SuppressWarnings("unchecked") public String type() { - FieldMappingMetadata metaData = typeMappings.get(fieldName); + FieldMappingMetadata metaData = typeMappings.getOrDefault(fieldName, FieldMappingMetadata.NULL); + if (metaData.isNull()) { + return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; + } + Map source = metaData.sourceAsMap(); String[] fieldPath = fieldName.split("\\."); - /* - * When field is not nested the metaData source is fieldName -> type - * When it is nested or contains "." in general (ex. fieldName.nestedName) the source is nestedName -> type - */ - String root = (fieldPath.length == 1) ? fieldName : fieldPath[1]; - Map fieldMapping = (Map) source.get(root); - for (int i = 2; i < fieldPath.length; i++) { - fieldMapping = (Map) fieldMapping.get(fieldPath[i]); - } - + // For object/nested field, fieldName is full path though only innermost field name present in mapping + // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' + String innermostFieldName = (fieldPath.length == 1) ? fieldName : fieldPath[fieldPath.length - 1]; + Map fieldMapping = (Map) source.get(innermostFieldName); return (String) fieldMapping.get("type"); } diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/DescribeResultSet.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/DescribeResultSet.java index 388b3259bd..8da696f21c 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/DescribeResultSet.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/DescribeResultSet.java @@ -40,7 +40,7 @@ public class DescribeResultSet extends ResultSet { * You are not required to set the field type to object explicitly, as this is the default value. 
* https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html */ - private static final String DEFAULT_OBJECT_DATATYPE = "object"; + public static final String DEFAULT_OBJECT_DATATYPE = "object"; private IndexStatement statement; private Object queryResult; diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/Schema.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/Schema.java index b2b8994719..92b4b02b43 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/Schema.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/Schema.java @@ -114,6 +114,8 @@ public enum Type { DATE, // Date types BOOLEAN, // Boolean types BINARY, // Binary types + OBJECT, + NESTED, INTEGER_RANGE, FLOAT_RANGE, LONG_RANGE, DOUBLE_RANGE, DATE_RANGE; // Range types public String nameLowerCase() { diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/SelectResultSet.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/SelectResultSet.java index 7a952061b5..a6f4dadeef 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/SelectResultSet.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/executor/format/SelectResultSet.java @@ -437,6 +437,7 @@ private List populateColumns(Query query, String[] fieldNames, Ma // _score is a special case since it is not included in typeMappings, so it is checked for here if (fieldName.equals(SCORE)) { columns.add(new Schema.Column(fieldName, fetchAlias(fieldName, fieldMap), Schema.Type.FLOAT)); + continue; } /* * Methods are also a special case as their type cannot be determined from typeMappings, so it is checked @@ -465,6 +466,7 @@ private List populateColumns(Query query, String[] fieldNames, Ma 
fetchMethodReturnType(fieldIndex, methodField) ) ); + continue; } /* @@ -473,7 +475,7 @@ private List populateColumns(Query query, String[] fieldNames, Ma * explicitly selected. */ FieldMapping field = new FieldMapping(fieldName, typeMappings, fieldMap); - if (typeMappings.containsKey(fieldName) && !field.isMetaField()) { + if (!field.isMetaField()) { if (field.isMultiField() && !field.isSpecified()) { continue; diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/ESActionFactory.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/ESActionFactory.java index ac05da3236..eeb3efe49a 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/ESActionFactory.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/ESActionFactory.java @@ -87,8 +87,11 @@ public static QueryAction create(Client client, String sql) public static QueryAction create(Client client, QueryActionRequest request) throws SqlParseException, SQLFeatureNotSupportedException { String sql = request.getSql(); - // Linebreak matcher + // Remove line breaker anywhere and semicolon at the end sql = sql.replaceAll("\\R", " ").trim(); + if (sql.endsWith(";")) { + sql = sql.substring(0, sql.length() - 1); + } switch (getFirstWord(sql)) { case "SELECT": diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/maker/Maker.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/maker/Maker.java index 1a711e533c..18caafa507 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/maker/Maker.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/query/maker/Maker.java @@ -76,6 +76,11 @@ public abstract class Maker { + /** + * UTC. 
+ */ + private static final ZoneId UTC = ZoneId.of("UTC"); + public static final Object NONE = new Object(); public static final Set queryFunctions = Sets.newHashSet( @@ -409,7 +414,8 @@ private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr v if (params.size() > 2) { zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); } else { - zoneId = ZoneId.systemDefault().toString(); + // Using UTC, if there is no Zone provided. + zoneId = UTC.getId(); } RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/SQLFunctions.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/SQLFunctions.java index 27f0045e64..ba635522dd 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/SQLFunctions.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/SQLFunctions.java @@ -527,7 +527,7 @@ private Tuple date_format(SQLExpr field, String pattern, String String name = nextId("date_format"); if (valueName == null) { return new Tuple<>(name, "def " + name + " = DateTimeFormatter.ofPattern('" + pattern + "').withZone(" - + (zoneId != null ? "ZoneId.of('" + zoneId + "')" : "ZoneId.systemDefault()") + + (zoneId != null ? 
"ZoneId.of('" + zoneId + "')" : "ZoneId.of(\"UTC\")") + ").format(Instant.ofEpochMilli(" + getPropertyOrValue(field) + ".toInstant().toEpochMilli()))"); } else { return new Tuple<>(name, exprString(field) + "; " @@ -973,13 +973,10 @@ public String getCastScriptStatement(String name, String castType, List String castFieldName = String.format("doc['%s'].value", paramers.get(0).toString()); switch (StringUtils.toUpper(castType)) { case "INT": - return String.format("def %s = Double.parseDouble(%s.toString()).intValue()", name, castFieldName); case "LONG": - return String.format("def %s = Double.parseDouble(%s.toString()).longValue()", name, castFieldName); case "FLOAT": - return String.format("def %s = Double.parseDouble(%s.toString()).floatValue()", name, castFieldName); case "DOUBLE": - return String.format("def %s = Double.parseDouble(%s.toString()).doubleValue()", name, castFieldName); + return getCastToNumericValueScript(name, castFieldName, StringUtils.toLower(castType)); case "STRING": return String.format("def %s = %s.toString()", name, castFieldName); case "DATETIME": @@ -990,6 +987,14 @@ public String getCastScriptStatement(String name, String castType, List } } + private String getCastToNumericValueScript(String varName, String docValue, String targetType) { + String script = + "def %1$s = (%2$s instanceof boolean) " + + "? (%2$s ? 1 : 0) " + + ": Double.parseDouble(%2$s.toString()).%3$sValue()"; + return StringUtils.format(script, varName, docValue, targetType); + } + /** * Returns return type of script function. This is simple approach, that might be not the best solution in the long * term. 
For example - for JDBC, if the column type in index is INTEGER, and the query is "select column+5", current diff --git a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/SyntaxAnalysisTest.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/SyntaxAnalysisTest.java index af7fceecd9..b15c5090cf 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/SyntaxAnalysisTest.java +++ b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/antlr/SyntaxAnalysisTest.java @@ -79,7 +79,7 @@ public void missingWhereKeywordShouldThrowException() { expectValidationFailWithErrorMessage( "SELECT * FROM accounts age = 1", "offending symbol [=]", // parser thought 'age' is alias of 'accounts' and failed at '=' - "Expecting", "'WHERE'" // "Expecting tokens in {, 'INNER', 'JOIN', ... 'WHERE', ','}" + "Expecting", ";" // "Expecting tokens in {, ';'}" ); } @@ -130,6 +130,11 @@ public void arithmeticExpressionInWhereClauseShouldPass() { validate("SELECT * FROM accounts WHERE age + 1 = 10"); } + @Test + public void queryEndWithSemiColonShouldPass() { + validate("SELECT * FROM accounts;"); + } + private void expectValidationFailWithErrorMessage(String query, String... messages) { exception.expect(SyntaxAnalysisException.class); exception.expectMessage(allOf(Arrays.stream(messages). 
diff --git a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMappingTest.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMappingTest.java index 72f55ab0d0..fdca3ed9a5 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMappingTest.java +++ b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/esdomain/mapping/FieldMappingTest.java @@ -15,18 +15,20 @@ package com.amazon.opendistroforelasticsearch.sql.legacy.esdomain.mapping; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + import com.amazon.opendistroforelasticsearch.sql.legacy.domain.Field; import com.amazon.opendistroforelasticsearch.sql.legacy.util.MatcherUtils; -import org.hamcrest.Matcher; -import org.junit.Test; - +import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.Map; import java.util.stream.Collectors; - -import static java.util.Collections.emptyMap; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import org.elasticsearch.common.bytes.BytesArray; +import org.hamcrest.Matcher; +import org.junit.Test; /** * Unit test for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField etc. 
@@ -81,6 +83,35 @@ public void testMultiFieldIsNotProperty() { ); } + @Test + public void testUnknownFieldTreatedAsObject() { + assertThat( + new FieldMapping("employee"), + hasType("object") + ); + } + + @Test + public void testDeepNestedField() { + assertThat( + new FieldMapping( + "employee.location.city", + ImmutableMap.of( + "employee.location.city", + new FieldMappingMetadata("employee.location.city", new BytesArray( + "{\n" + + " \"city\" : {\n" + + " \"type\" : \"text\"\n" + + " }\n" + + "}") + ) + ), + emptyMap() + ), + hasType("text") + ); + } + private Matcher isWildcardSpecified(boolean isMatched) { return MatcherUtils.featureValueOf("is field match wildcard specified in query", is(isMatched), @@ -93,6 +124,12 @@ private Matcher isPropertyField(boolean isProperty) { FieldMapping::isPropertyField); } + private Matcher hasType(String expected) { + return MatcherUtils.featureValueOf("type", + is(expected), + FieldMapping::type); + } + private Map fieldsSpecifiedInQuery(String...fieldNames) { return Arrays.stream(fieldNames). 
collect(Collectors.toMap(name -> name, diff --git a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java index cf3f4b6ff7..9ed790bfcc 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java +++ b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/RestSQLQueryActionTest.java @@ -67,8 +67,8 @@ public void skipExplainThatNotSupport() { @Test public void skipQueryThatNotSupport() { SQLQueryRequest request = new SQLQueryRequest( - new JSONObject("{\"query\": \"SELECT * FROM test\"}"), - "SELECT * FROM test", + new JSONObject("{\"query\": \"SELECT * FROM test WHERE age = 10\"}"), + "SELECT * FROM test WHERE age = 10", QUERY_API_ENDPOINT, ""); diff --git a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/unittest/utils/SQLFunctionsTest.java b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/unittest/utils/SQLFunctionsTest.java index f324e3f9e1..f8a664928b 100644 --- a/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/unittest/utils/SQLFunctionsTest.java +++ b/legacy/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/unittest/utils/SQLFunctionsTest.java @@ -30,6 +30,7 @@ import com.amazon.opendistroforelasticsearch.sql.legacy.executor.format.Schema; import com.amazon.opendistroforelasticsearch.sql.legacy.utils.SQLFunctions; import com.google.common.collect.ImmutableList; +import java.util.Arrays; import org.elasticsearch.common.collect.Tuple; import org.junit.Assert; import org.junit.Rule; @@ -39,11 +40,12 @@ import java.util.ArrayList; import java.util.List; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class SQLFunctionsTest { - private SQLFunctions sqlFunctions; + private SQLFunctions sqlFunctions = new 
SQLFunctions(); @Rule public ExpectedException exceptionRule = ExpectedException.none(); @@ -96,4 +98,16 @@ public void testCastReturnType() { final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); Assert.assertEquals(returnType, Schema.Type.INTEGER); } + + @Test + public void testCastIntStatementScript() throws SqlParseException { + assertEquals( + "def result = (doc['age'].value instanceof boolean) " + + "? (doc['age'].value ? 1 : 0) " + + ": Double.parseDouble(doc['age'].value.toString()).intValue()", + sqlFunctions.getCastScriptStatement( + "result", "int", Arrays.asList(new KVValue("age"))) + ); + } + } \ No newline at end of file diff --git a/plugin/build.gradle b/plugin/build.gradle index 71430b95de..3ba7d4081a 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -31,6 +31,8 @@ thirdPartyAudit.enabled = false configurations.all { // conflict with spring-jcl exclude group: "commons-logging", module: "commons-logging" + // enforce 1.1.3, https://www.whitesourcesoftware.com/vulnerability-database/WS-2019-0379 + resolutionStrategy.force 'commons-codec:commons-codec:1.13' } dependencies { diff --git a/ppl/build.gradle b/ppl/build.gradle index 3818d805f4..6f55868cc2 100644 --- a/ppl/build.gradle +++ b/ppl/build.gradle @@ -25,7 +25,8 @@ dependencies { antlr "org.antlr:antlr4:4.7.1" compile "org.antlr:antlr4-runtime:4.7.1" - compile group: 'com.google.guava', name: 'guava', version: '23.0' + // https://github.com/google/guava/wiki/CVE-2018-10237 + compile group: 'com.google.guava', name: 'guava', version: '29.0-jre' compile group: 'org.elasticsearch', name: 'elasticsearch-x-content', version: "${es_version}" compile group: 'org.json', name: 'json', version: '20180813' compile group: 'org.springframework', name: 'spring-context', version: '5.2.5.RELEASE' diff --git a/ppl/src/main/antlr/OpenDistroPPLLexer.g4 b/ppl/src/main/antlr/OpenDistroPPLLexer.g4 index 8b5eb77e10..16564a6e21 100644 --- 
a/ppl/src/main/antlr/OpenDistroPPLLexer.g4 +++ b/ppl/src/main/antlr/OpenDistroPPLLexer.g4 @@ -135,115 +135,38 @@ DC: 'DC'; // BASIC FUNCTIONS ABS: 'ABS'; -ACOS: 'ACOS'; -ADD: 'ADD'; -ADDDATE: 'ADDDATE'; -ADDTIME: 'ADDTIME'; -ASCII: 'ASCII'; -ASIN: 'ASIN'; -ATAN: 'ATAN'; -ATAN2: 'ATAN2'; -CBRT: 'CBRT'; CEIL: 'CEIL'; -CONCAT: 'CONCAT'; -CONCAT_WS: 'CONCAT_WS'; -COS: 'COS'; -COSH: 'COSH'; -COT: 'COT'; -CURDATE: 'CURDATE'; -DATE: 'DATE'; -DATE_FORMAT: 'DATE_FORMAT'; -DAYOFMONTH: 'DAYOFMONTH'; -DEGREES: 'DEGREES'; +CEILING: 'CEILING'; +CONV: 'CONV'; +CRC32: 'CRC32'; E: 'E'; EXP: 'EXP'; -EXPM1: 'EXPM1'; FLOOR: 'FLOOR'; -GREATEST: 'GREATEST'; -IF: 'IF'; -IFNULL: 'IFNULL'; -ISNULL: 'ISNULL'; -LEAST: 'LEAST'; -LEFT: 'LEFT'; -LENGTH: 'LENGTH'; LN: 'LN'; -LOCATE: 'LOCATE'; LOG: 'LOG'; LOG10: 'LOG10'; LOG2: 'LOG2'; -LOWER: 'LOWER'; -LTRIM: 'LTRIM'; -MAKETIME: 'MAKETIME'; -MISSING: 'MISSING'; -MODULUS: 'MODULUS'; -MONTH: 'MONTH'; -MONTHNAME: 'MONTHNAME'; -MULTIPLY: 'MULTIPLY'; -NOW: 'NOW'; +MOD: 'MOD'; PI: 'PI'; POW: 'POW'; POWER: 'POWER'; -RADIANS: 'RADIANS'; RAND: 'RAND'; -REPLACE: 'REPLACE'; -RIGHT: 'RIGHT'; -RINT: 'RINT'; ROUND: 'ROUND'; -RTRIM: 'RTRIM'; SIGN: 'SIGN'; -SIGNUM: 'SIGNUM'; -SIN: 'SIN'; -SINH: 'SINH'; SQRT: 'SQRT'; -SUBTRACT: 'SUBTRACT'; -SUBSTRING: 'SUBSTRING'; -TAN: 'TAN'; -TIMESTAMP: 'TIMESTAMP'; -TRIM: 'TRIM'; -UPPER: 'UPPER'; -YEAR: 'YEAR'; +TRUNCATE: 'TRUNCATE'; -// ES FUNCTIONS -DATE_HISTOGRAM: 'DATE_HISTOGRAM'; -DAY_OF_MONTH: 'DAY_OF_MONTH'; -DAY_OF_YEAR: 'DAY_OF_YEAR'; -DAY_OF_WEEK: 'DAY_OF_WEEK'; -EXCLUDE: 'EXCLUDE'; -EXTENDED_STATS: 'EXTENDED_STATS'; -FIELD: 'FIELD'; -FILTER: 'FILTER'; -GEO_BOUNDING_BOX: 'GEO_BOUNDING_BOX'; -GEO_CELL: 'GEO_CELL'; -GEO_DISTANCE: 'GEO_DISTANCE'; -GEO_DISTANCE_RANGE: 'GEO_DISTANCE_RANGE'; -GEO_INTERSECTS: 'GEO_INTERSECTS'; -GEO_POLYGON: 'GEO_POLYGON'; -HISTOGRAM: 'HISTOGRAM'; -HOUR_OF_DAY: 'HOUR_OF_DAY'; -INCLUDE: 'INCLUDE'; -IN_TERMS: 'IN_TERMS'; -MATCHPHRASE: 'MATCHPHRASE'; -MATCH_PHRASE: 'MATCH_PHRASE'; -MATCHQUERY: 
'MATCHQUERY'; -MATCH_QUERY: 'MATCH_QUERY'; -MINUTE_OF_DAY: 'MINUTE_OF_DAY'; -MINUTE_OF_HOUR: 'MINUTE_OF_HOUR'; -MONTH_OF_YEAR: 'MONTH_OF_YEAR'; -MULTIMATCH: 'MULTIMATCH'; -MULTI_MATCH: 'MULTI_MATCH'; -NESTED: 'NESTED'; -PERCENTILES: 'PERCENTILES'; -REGEXP_QUERY: 'REGEXP_QUERY'; -REVERSE_NESTED: 'REVERSE_NESTED'; -QUERY: 'QUERY'; -SCORE: 'SCORE'; -SECOND_OF_MINUTE: 'SECOND_OF_MINUTE'; -TERM: 'TERM'; -TERMS: 'TERMS'; -TOPHITS: 'TOPHITS'; -WEEK_OF_YEAR: 'WEEK_OF_YEAR'; -WILDCARDQUERY: 'WILDCARDQUERY'; -WILDCARD_QUERY: 'WILDCARD_QUERY'; +// TRIGONOMETRIC FUNCTIONS +ACOS: 'ACOS'; +ASIN: 'ASIN'; +ATAN: 'ATAN'; +ATAN2: 'ATAN2'; +COS: 'COS'; +COT: 'COT'; +DEGREES: 'DEGREES'; +RADIANS: 'RADIANS'; +SIN: 'SIN'; +TAN: 'TAN'; // LITERALS AND VALUES //STRING_LITERAL: DQUOTA_STRING | SQUOTA_STRING | BQUOTA_STRING; @@ -251,7 +174,7 @@ ID: ID_LITERAL; INTEGER_LITERAL: DEC_DIGIT+; DECIMAL_LITERAL: (DEC_DIGIT+)? '.' DEC_DIGIT+; -fragment ID_LITERAL: [A-Z_$0-9@]*?[A-Z_$\-]+?[A-Z_$\-0-9]*; +fragment ID_LITERAL: [A-Z_]+[A-Z_$0-9@\-]*; DQUOTA_STRING: '"' ( '\\'. | '""' | ~('"'| '\\') )* '"'; SQUOTA_STRING: '\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\''; BQUOTA_STRING: '`' ( '\\'. | '``' | ~('`'|'\\'))* '`'; diff --git a/ppl/src/main/antlr/OpenDistroPPLParser.g4 b/ppl/src/main/antlr/OpenDistroPPLParser.g4 index f9e3cdb7e3..d6ee05fc93 100644 --- a/ppl/src/main/antlr/OpenDistroPPLParser.g4 +++ b/ppl/src/main/antlr/OpenDistroPPLParser.g4 @@ -195,7 +195,7 @@ evalFunctionName ; functionArgs - : functionArg (COMMA functionArg)* + : (functionArg (COMMA functionArg)*)? 
; functionArg @@ -203,7 +203,13 @@ functionArg ; mathematicalFunctionBase - : ABS + : ABS | CEIL | CEILING | CONV | CRC32 | E | EXP | FLOOR | LN | LOG | LOG10 | LOG2 | MOD | PI |POW | POWER + | RAND | ROUND | SIGN | SQRT | TRUNCATE + | trigonometricFunctionName + ; + +trigonometricFunctionName + : ACOS | ASIN | ATAN | ATAN2 | COS | COT | DEGREES | RADIANS | SIN | TAN ; dateAndTimeFunctionBase @@ -226,8 +232,8 @@ binaryOperator /** literals and values*/ literalValue : stringLiteral - | (PLUS | MINUS)? integerLiteral - | (PLUS | MINUS)? decimalLiteral + | integerLiteral + | decimalLiteral | booleanLiteral ; @@ -236,11 +242,11 @@ stringLiteral ; integerLiteral - : INTEGER_LITERAL + : (PLUS | MINUS)? INTEGER_LITERAL ; decimalLiteral - : DECIMAL_LITERAL + : (PLUS | MINUS)? DECIMAL_LITERAL ; booleanLiteral diff --git a/protocol/build.gradle b/protocol/build.gradle index 8a90dea360..db00d7f8f0 100644 --- a/protocol/build.gradle +++ b/protocol/build.gradle @@ -9,7 +9,8 @@ repositories { } dependencies { - compile group: 'com.google.guava', name: 'guava', version: '23.0' + // https://github.com/google/guava/wiki/CVE-2018-10237 + compile group: 'com.google.guava', name: 'guava', version: '29.0-jre' compile group: 'org.json', name: 'json', version: '20180813' //TODO: change to other JSON lib? 
compile project(':core') diff --git a/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java b/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java index 45dfd5bf88..389b259861 100644 --- a/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java +++ b/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/QueryResult.java @@ -93,7 +93,7 @@ private Object[] convertExprValuesToValues(Collection exprValues) { } private String getTypeString(ExprValue exprValue) { - return exprValue.type().name().toLowerCase(); + return exprValue.type().typeName().toLowerCase(); } } diff --git a/release-notes/opendistro-elasticsearch-sql.release-notes-1.9.0.1.md b/release-notes/opendistro-elasticsearch-sql.release-notes-1.9.0.1.md new file mode 100644 index 0000000000..36a7d94be4 --- /dev/null +++ b/release-notes/opendistro-elasticsearch-sql.release-notes-1.9.0.1.md @@ -0,0 +1,44 @@ +## 2020-07-29 Version 1.9.0.1 + +### Feature +* ODBC: Change Tableau connector version, support Tableau and Excel ([#622](https://github.com/opendistro-for-elasticsearch/sql/pull/622)) +* Support trignometric functions acos, asin, atan, atan2, cos, cot, degrees, radians, sin, tan ([#599](https://github.com/opendistro-for-elasticsearch/sql/pull/599)) +* Support mathmatical functions rand and constants e, pi ([#591](https://github.com/opendistro-for-elasticsearch/sql/pull/591)) +* Support SELET * and FROM clause in new SQL parser ([#573](https://github.com/opendistro-for-elasticsearch/sql/pull/573)) +* Support mathmatical functions: conv, crc32, mod, pow/power, round, sign, sqrt, truncate ([#577](https://github.com/opendistro-for-elasticsearch/sql/pull/577)) +* add date and time support ([#568](https://github.com/opendistro-for-elasticsearch/sql/pull/568)) +* Support mathematical functions ceil/ceiling, exp, floor, ln, log 
([#540](https://github.com/opendistro-for-elasticsearch/sql/pull/540)) + +### Enhancement +* Support queres end with semi colon ([#609](https://github.com/opendistro-for-elasticsearch/sql/pull/609)) +* ODBC: Adding BASIC & AWS_SIGV4 auth in M Connector ([#610](https://github.com/opendistro-for-elasticsearch/sql/pull/610)) +* ODBC: adding manual test plan for Microsoft Excel testing ([#604](https://github.com/opendistro-for-elasticsearch/sql/pull/604)) +* ODBC: Report error from Excel when executing an invalid query ([#611](https://github.com/opendistro-for-elasticsearch/sql/pull/611)) +* Add ElasticsarchExprValueFactory in StorageEngine ([#608](https://github.com/opendistro-for-elasticsearch/sql/pull/608)) +* Using UTC asdefault timezone for date_format function if not provided ([#605](https://github.com/opendistro-for-elasticsearch/sql/pull/605)) +* ODBC: AddingPower BI M connector ([#596](https://github.com/opendistro-for-elasticsearch/sql/pull/596)) +* ODBC: add ODC 2.x functions called by Excel for Mac ([#592](https://github.com/opendistro-for-elasticsearch/sql/pull/592)) +* ODBC: Remove catalog support from driver ([#566](https://github.com/opendistro-for-elasticsearch/sql/pull/566)) +* ODBC: Build driver files in parallel ([#570](https://github.com/opendistro-for-elasticsearch/sql/pull/570)) +* Keep mismatch results when error occurs in comparison test ([#557](https://github.com/opendistro-for-elasticsearch/sql/pull/557)) + +### Bug Fixes +* Move workbench down in kibana nav ([#578](https://github.com/opendistro-for-elasticsearch/sql/pull/578)) +* ODBC: Fix fo data loading failure in Power BI Desktop ([#627](https://github.com/opendistro-for-elasticsearch/sql/pull/627)) +* Issue 623, fix security vulnerability regarding to depedencies commons-codec and Guava ([#624](https://github.com/opendistro-for-elasticsearch/sql/pull/624)) +* Extra fixes or Mac ODBC driver ([#602](https://github.com/opendistro-for-elasticsearch/sql/pull/602)) +* Fix CAST boo field to 
integer issue ([#600](https://github.com/opendistro-for-elasticsearch/sql/pull/600)) +* Bumped lodash version to fix dependency security vulnerability ([#598](https://github.com/opendistro-for-elasticsearch/sql/pull/598)) +* Fix object/nsted field select issue ([#584](https://github.com/opendistro-for-elasticsearch/sql/pull/584)) +* Remove columns from result which are not returned by SELECT * query but returned by DESCRIBE query ([#556](https://github.com/opendistro-for-elasticsearch/sql/pull/556)) + +### Infrastructure +* Add workflow to draft release on push ([#572](https://github.com/opendistro-for-elasticsearch/sql/pull/572)) +* Merge all SQL repos and adjust workflows ([#549](https://github.com/opendistro-for-elasticsearch/sql/pull/549)) + +### Documentation +* Update docs fter merging repos ([#563](https://github.com/opendistro-for-elasticsearch/sql/pull/563)) +* ODBC: Updatig Microsoft Excel connection documents ([#581](https://github.com/opendistro-for-elasticsearch/sql/pull/581)) +* ODBC: Add usr documentation for using Microsoft Excel on Mac ([#594](https://github.com/opendistro-for-elasticsearch/sql/pull/594)) +* ODBC: Update documentation for using Microsoft Excel with Open Distro For Elasticsearch ([#576](https://github.com/opendistro-for-elasticsearch/sql/pull/576)) +* ODBC: Add douments for `Refresh` & `Export as CSV files` options in Microsoft Excel ([#571](https://github.com/opendistro-for-elasticsearch/sql/pull/571)) diff --git a/sql-cli/CONTRIBUTING.md b/sql-cli/CONTRIBUTING.md index 5cdffe3fd4..688d58a0a3 100644 --- a/sql-cli/CONTRIBUTING.md +++ b/sql-cli/CONTRIBUTING.md @@ -11,7 +11,7 @@ information to effectively respond to your bug report or contribution. We welcome you to use the GitHub issue tracker to report bugs or suggest features. 
-When filing an issue, please check [existing open](https://github.com/opendistro-for-elasticsearch/sql-cli/issues), or [recently closed](https://github.com/opendistro-for-elasticsearch/sql-cli/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already +When filing an issue, please check [existing open](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aopen+is%3Aissue+label%3ACLI), or [recently closed](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aissue+is%3Aclosed+label%3ACLI), issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: * A reproducible test case or series of steps @@ -41,7 +41,7 @@ GitHub provides additional document on [forking a repository](https://help.githu ## Finding contributions to work on -Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/opendistro-for-elasticsearch/sql-cli/labels/help%20wanted) issues is a great place to start. +Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aopen+label%3A%22help+wanted%22+label%3ACLI) issues is a great place to start. ## Code of Conduct @@ -56,6 +56,6 @@ If you discover a potential security issue in this project we ask that you notif ## Licensing -See the [LICENSE](https://github.com/opendistro-for-elasticsearch/sql-cli/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 
+See the [LICENSE](https://github.com/opendistro-for-elasticsearch/sql/blob/master/sql-cli/LICENSE.TXT) file for our project's licensing. We will ask you to confirm the licensing of your contribution. We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. diff --git a/sql-cli/README.md b/sql-cli/README.md index 6425739606..2f24128176 100644 --- a/sql-cli/README.md +++ b/sql-cli/README.md @@ -1,4 +1,4 @@ -[![Test and Build Workflow](https://github.com/opendistro-for-elasticsearch/sql-cli/workflows/Test%20and%20Build/badge.svg)](https://github.com/opendistro-for-elasticsearch/sql-cli/actions) +[![SQL CLI Test and Build](https://github.com/opendistro-for-elasticsearch/sql/workflows/SQL%20CLI%20Test%20and%20Build/badge.svg)](https://github.com/opendistro-for-elasticsearch/sql/actions) [![Latest Version](https://img.shields.io/pypi/v/odfe-sql-cli.svg)](https://pypi.python.org/pypi/odfe-sql-cli/) [![Documentation](https://img.shields.io/badge/documentation-blue.svg)](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/cli/) [![Chat](https://img.shields.io/badge/chat-on%20forums-blue)](https://discuss.opendistrocommunity.dev/c/sql/) @@ -80,13 +80,13 @@ You can also configure the following connection properties: * Elasticsearch with X-pack security enabled * `--aws-auth`: Turns on AWS sigV4 authentication to connect to an Amazon Elasticsearch Service endpoint. Use with the AWS CLI (`aws configure`) to retrieve the local AWS configuration to authenticate and connect. -For a list of all available configurations, see [clirc](https://github.com/opendistro-for-elasticsearch/sql-cli/blob/master/src/conf/clirc). +For a list of all available configurations, see [clirc](https://github.com/opendistro-for-elasticsearch/sql/blob/master/sql-cli/src/odfe_sql_cli/conf/clirc). ## Using the CLI -1. 
Save the sample [accounts test data](https://github.com/opendistro-for-elasticsearch/sql/blob/master/src/test/resources/doctest/testdata/accounts.json) file. +1. Save the sample [accounts test data](https://github.com/opendistro-for-elasticsearch/sql/blob/master/integ-test/src/test/resources/accounts.json) file. 2. Index the sample data. ``` @@ -133,7 +133,7 @@ If you discover a potential security issue in this project we ask that you notif ## Licensing -See the [LICENSE](https://github.com/opendistro-for-elasticsearch/sql-cli/blob/master/LICENSE.TXT) file for our project's licensing. We will ask you to confirm the licensing of your contribution. +See the [LICENSE](https://github.com/opendistro-for-elasticsearch/sql/blob/master/sql-cli/LICENSE.TXT) file for our project's licensing. We will ask you to confirm the licensing of your contribution. diff --git a/sql-cli/src/odfe_sql_cli/__init__.py b/sql-cli/src/odfe_sql_cli/__init__.py index 3851f216ed..bef8a15b09 100644 --- a/sql-cli/src/odfe_sql_cli/__init__.py +++ b/sql-cli/src/odfe_sql_cli/__init__.py @@ -12,4 +12,4 @@ express or implied. See the License for the specific language governing permissions and limitations under the License. """ -__version__ = "1.9.0.0" +__version__ = "1.9.0.1" diff --git a/sql-jdbc/CONTRIBUTING.md b/sql-jdbc/CONTRIBUTING.md index 2af8e8f96b..46d764bc31 100644 --- a/sql-jdbc/CONTRIBUTING.md +++ b/sql-jdbc/CONTRIBUTING.md @@ -11,7 +11,7 @@ information to effectively respond to your bug report or contribution. We welcome you to use the GitHub issue tracker to report bugs or suggest features. 
-When filing an issue, please check [existing open](../../issues), or [recently closed](../../issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already +When filing an issue, please check [existing open](../../../issues?q=is%3Aopen+is%3Aissue+label%3AJDBC), or [recently closed](../../../issues?q=is%3Aissue+is%3Aclosed+label%3AJDBC), issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: * A reproducible test case or series of steps @@ -41,7 +41,7 @@ GitHub provides additional document on [forking a repository](https://help.githu ## Finding contributions to work on -Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](../../labels/help%20wanted) issues is a great place to start. +Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](../../../issues?q=is%3Aopen+label%3A%22help+wanted%22+label%3AJDBC) issues is a great place to start. 
## Code of Conduct diff --git a/sql-jdbc/build.gradle b/sql-jdbc/build.gradle index 4719102196..0bf3cb26f0 100644 --- a/sql-jdbc/build.gradle +++ b/sql-jdbc/build.gradle @@ -32,7 +32,7 @@ plugins { group 'com.amazon.opendistroforelasticsearch.client' // keep version in sync with version in Driver source -version '1.9.0.0' +version '1.9.0.1' boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true")); if (snapshot) { diff --git a/sql-jdbc/docs/tableau.md b/sql-jdbc/docs/tableau.md index eded78d03e..c572a7b23a 100644 --- a/sql-jdbc/docs/tableau.md +++ b/sql-jdbc/docs/tableau.md @@ -4,7 +4,7 @@ * Download and install [Tableau Desktop](https://www.tableau.com/en-ca/products/desktop/download). * Install and configure [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/). -* Download the [Open Distro for ElasticSearch JDBC Driver](https://github.com/opendistro-for-elasticsearch/sql-jdbc#download-and-installation). +* Download the [Open Distro for ElasticSearch JDBC Driver](https://github.com/opendistro-for-elasticsearch/sql/blob/master/sql-jdbc/README.md#download-and-installation). ## Setup @@ -47,7 +47,7 @@ For futher details check [using a .tdc file with Tableau](https://kb.tableau.com ### Connection information You will need: -* [JDBC connection string](https://github.com/opendistro-for-elasticsearch/sql-jdbc#connection-url-and-other-settings) to enter in the URL field when you connect. +* [JDBC connection string](https://github.com/opendistro-for-elasticsearch/sql/blob/master/sql-jdbc/README.md#connection-url-and-other-settings) to enter in the URL field when you connect. Sample connection string for connecting to localhost: `jdbc:elasticsearch://localhost:9200`. 
diff --git a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java index debe3333ef..0f74a5ed8d 100644 --- a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java +++ b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/internal/Version.java @@ -19,7 +19,7 @@ public enum Version { // keep this in sync with the gradle version - Current(1, 9, 0, 0); + Current(1, 9, 0, 1); private int major; private int minor; diff --git a/sql-odbc/.gitignore b/sql-odbc/.gitignore index 3dd5ca542e..ecaefb1c61 100644 --- a/sql-odbc/.gitignore +++ b/sql-odbc/.gitignore @@ -56,3 +56,6 @@ CPackSourceConfig.cmake CTestTestfile.cmake /sdk-build64/ /cmake-build64/ +/src/PowerBIConnector/bin/Debug/ +/src/PowerBIConnector/obj/ +/src/PowerBIConnector/.vs/ diff --git a/sql-odbc/BUILD_INSTRUCTIONS.md b/sql-odbc/BUILD_INSTRUCTIONS.md index cefda0e6a5..36c5202b36 100644 --- a/sql-odbc/BUILD_INSTRUCTIONS.md +++ b/sql-odbc/BUILD_INSTRUCTIONS.md @@ -4,7 +4,7 @@ The ElasticsearchODBC driver can be build on Windows and Mac. ## Setting up Dependencies -The driver [source code](https://github.com/opendistro-for-elasticsearch/sql-odbc) must be downloaded onto the system to build it. +The driver [source code](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc) must be downloaded onto the system to build it. ### Windows Dependencies diff --git a/sql-odbc/CONTRIBUTING.md b/sql-odbc/CONTRIBUTING.md index f25870380c..2cc7222a5c 100644 --- a/sql-odbc/CONTRIBUTING.md +++ b/sql-odbc/CONTRIBUTING.md @@ -11,7 +11,7 @@ information to effectively respond to your bug report or contribution. We welcome you to use the GitHub issue tracker to report bugs or suggest features. 
-When filing an issue, please check [existing open](https://github.com/OpenDistro/elasticsearch-security-tlstool/issues), or [recently closed](https://github.com/OpenDistro/elasticsearch-security-tlstool/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already +When filing an issue, please check [existing open](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aopen+is%3Aissue+label%3AODBC), or [recently closed](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aissue+is%3Aclosed+label%3AODBC), issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: * A reproducible test case or series of steps @@ -41,7 +41,7 @@ GitHub provides additional document on [forking a repository](https://help.githu ## Finding contributions to work on -Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/OpenDistro/elasticsearch-security-tlstool/labels/help%20wanted) issues is a great place to start. +Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](issues?q=is%3Aopen+label%3A%22help+wanted%22+label%3AODBC) issues is a great place to start. ## Code of Conduct @@ -56,6 +56,6 @@ If you discover a potential security issue in this project we ask that you notif ## Licensing -See the [LICENSE](https://github.com/OpenDistro/elasticsearch-security-tlstool/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 
+See the [LICENSE](https://github.com/opendistro-for-elasticsearch/sql/blob/master/sql-odbc/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. diff --git a/sql-odbc/build_mac_debug64.sh b/sql-odbc/build_mac_debug64.sh new file mode 100755 index 0000000000..3522137921 --- /dev/null +++ b/sql-odbc/build_mac_debug64.sh @@ -0,0 +1,14 @@ +# Build AWS SDK +# $BITNESS=64 + +cd src +git clone -b "1.7.329" "https://github.com/aws/aws-sdk-cpp.git" +cd .. + +PREFIX_PATH=$(pwd) +mkdir cmake-build64 +cd cmake-build64 +cmake ../src -DCMAKE_INSTALL_PREFIX=${PREFIX_PATH}/AWSSDK/ -DCMAKE_BUILD_TYPE=Debug -DBUILD_ONLY="core" -DCUSTOM_MEMORY_MANAGEMENT="OFF" -DENABLE_RTTI="OFF" -DENABLE_TESTING="OFF" +cd .. + +cmake --build cmake-build64 -- -j 4 diff --git a/sql-odbc/build_mac_release64.sh b/sql-odbc/build_mac_release64.sh new file mode 100755 index 0000000000..707a0ee53f --- /dev/null +++ b/sql-odbc/build_mac_release64.sh @@ -0,0 +1,14 @@ +# Build AWS SDK +# $BITNESS=64 + +cd src +git clone -b "1.7.329" "https://github.com/aws/aws-sdk-cpp.git" +cd .. + +PREFIX_PATH=$(pwd) +mkdir cmake-build64 +cd cmake-build64 +cmake ../src -DCMAKE_INSTALL_PREFIX=${PREFIX_PATH}/AWSSDK/ -DCMAKE_BUILD_TYPE=Release -DBUILD_ONLY="core" -DCUSTOM_MEMORY_MANAGEMENT="OFF" -DENABLE_RTTI="OFF" -DENABLE_TESTING="OFF" +cd .. 
+ +cmake --build cmake-build64 -- -j 4 diff --git a/sql-odbc/build_win_debug32.ps1 b/sql-odbc/build_win_debug32.ps1 index 2c64f9c413..add9a03dfb 100644 --- a/sql-odbc/build_win_debug32.ps1 +++ b/sql-odbc/build_win_debug32.ps1 @@ -12,8 +12,8 @@ git clone "https://github.com/aws/aws-sdk-cpp.git" $prefix_path = (pwd).path cmake .\aws-sdk-cpp -A Win32 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Debug -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" -msbuild ALL_BUILD.vcxproj /p:Configuration=Debug -msbuild INSTALL.vcxproj /p:Configuration=Debug +msbuild ALL_BUILD.vcxproj /m /p:Configuration=Debug +msbuild INSTALL.vcxproj /m /p:Configuration=Debug cd .. @@ -21,6 +21,7 @@ cd .. cmake -S src -B cmake-build${BITNESS} -A Win32 -D CMAKE_INSTALL_PREFIX=sdk-build${BITNESS}\AWSSDK\ -D BUILD_WITH_TESTS=ON # # Build Project -cmake --build .\cmake-build${BITNESS} --config Debug +cmake --build .\cmake-build${BITNESS} --config Debug --parallel 4 -msbuild cmake-build32\PACKAGE.vcxproj -p:Configuration=Debug \ No newline at end of file +cp .\sdk-build32\bin\Debug\* .\bin32\Debug +cp .\cmake-build32\bin\Debug\* .\bin32\Debug diff --git a/sql-odbc/build_win_debug64.ps1 b/sql-odbc/build_win_debug64.ps1 index 2e38670790..998dffedca 100644 --- a/sql-odbc/build_win_debug64.ps1 +++ b/sql-odbc/build_win_debug64.ps1 @@ -12,8 +12,8 @@ git clone "https://github.com/aws/aws-sdk-cpp.git" $prefix_path = (pwd).path cmake .\aws-sdk-cpp -A x64 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Debug -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" -msbuild ALL_BUILD.vcxproj /p:Configuration=Debug -msbuild INSTALL.vcxproj /p:Configuration=Debug +msbuild ALL_BUILD.vcxproj /m /p:Configuration=Debug +msbuild INSTALL.vcxproj /m /p:Configuration=Debug cd .. @@ -21,6 +21,7 @@ cd .. 
cmake -S src -B cmake-build${BITNESS} -A x64 -D CMAKE_INSTALL_PREFIX=sdk-build${BITNESS}\AWSSDK\ -D BUILD_WITH_TESTS=ON # # Build Project -cmake --build .\cmake-build${BITNESS} --config Debug +cmake --build .\cmake-build${BITNESS} --config Debug --parallel 4 -cp .\sdk-build64\bin\Debug\* .\bin64\Debug \ No newline at end of file +cp .\sdk-build64\bin\Debug\* .\bin64\Debug +cp .\cmake-build64\bin\Debug\* .\bin64\Debug diff --git a/sql-odbc/build_win_release32.ps1 b/sql-odbc/build_win_release32.ps1 index ebf707e167..b693778c94 100644 --- a/sql-odbc/build_win_release32.ps1 +++ b/sql-odbc/build_win_release32.ps1 @@ -12,8 +12,8 @@ git clone "https://github.com/aws/aws-sdk-cpp.git" $prefix_path = (pwd).path cmake .\aws-sdk-cpp -A Win32 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Release -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" -msbuild ALL_BUILD.vcxproj /p:Configuration=Release -msbuild INSTALL.vcxproj /p:Configuration=Release +msbuild ALL_BUILD.vcxproj /m /p:Configuration=Release +msbuild INSTALL.vcxproj /m /p:Configuration=Release cd .. @@ -21,4 +21,7 @@ cd .. 
cmake -S src -B cmake-build${BITNESS} -A Win32 -D CMAKE_INSTALL_PREFIX=sdk-build${BITNESS}\AWSSDK\ -D BUILD_WITH_TESTS=ON # # Build Project -cmake --build .\cmake-build${BITNESS} --config Release +cmake --build .\cmake-build${BITNESS} --config Release --parallel 4 + +cp .\sdk-build32\bin\Release\* .\bin32\Release +cp .\cmake-build32\bin\Release\* .\bin32\Release diff --git a/sql-odbc/build_win_release64.ps1 b/sql-odbc/build_win_release64.ps1 index c39d0f9757..15a917219f 100644 --- a/sql-odbc/build_win_release64.ps1 +++ b/sql-odbc/build_win_release64.ps1 @@ -12,8 +12,8 @@ git clone "https://github.com/aws/aws-sdk-cpp.git" $prefix_path = (pwd).path cmake .\aws-sdk-cpp -A x64 -D CMAKE_INSTALL_PREFIX=${prefix_path}\AWSSDK\ -D CMAKE_BUILD_TYPE=Release -D BUILD_ONLY="core" -D CUSTOM_MEMORY_MANAGEMENT="OFF" -D ENABLE_RTTI="OFF" -D ENABLE_TESTING="OFF" -msbuild ALL_BUILD.vcxproj /p:Configuration=Release -msbuild INSTALL.vcxproj /p:Configuration=Release +msbuild ALL_BUILD.vcxproj /m /p:Configuration=Release +msbuild INSTALL.vcxproj /m /p:Configuration=Release cd .. @@ -21,6 +21,7 @@ cd .. cmake -S src -B cmake-build64 -A x64 -D CMAKE_INSTALL_PREFIX=sdk-build64\AWSSDK\ -D BUILD_WITH_TESTS=ON # # Build Project -cmake --build .\cmake-build64 --config Release +cmake --build .\cmake-build64 --config Release --parallel 4 -cp .\sdk-build64\bin\Release\* .\bin64\Release \ No newline at end of file +cp .\sdk-build64\bin\Release\* .\bin64\Release +cp .\cmake-build64\bin\Release\* .\bin64\Release diff --git a/sql-odbc/docs/test/data_connection_wizard.md b/sql-odbc/docs/test/data_connection_wizard.md new file mode 100644 index 0000000000..cf2494021f --- /dev/null +++ b/sql-odbc/docs/test/data_connection_wizard.md @@ -0,0 +1,34 @@ +## Data Connection Wizard + +* Open blank workbook in Microsoft Excel. +* Click on **Data** > **Get Data** > **Legacy Wizards** > **From Data Connection Wizard (Legacy)** + + + +* Select **ODBC DSN** and Click on **Next**. 
+ 

+* Select **ODFE SQL ODBC DSN** and Click on **Next**. + + + +* Select required table and Click on **Next**. + + + +* Add the description. Edit file names if required. Select whether to use this connection file while refreshing data. Click on **Finish**. + + + +* Select the worksheet to load data. Click on **OK**. + + + +* Data will be loaded in the spreadsheet. + + + +**NOTE**: If **Legacy Wizards** is disabled in Microsoft Excel, it can be enabled by **File** > **Options** > **Data** > **Show legacy data import wizards** > Check **From Data Connection Wizard (Legacy)**. + + diff --git a/sql-odbc/docs/test/excel_connection.md b/sql-odbc/docs/test/excel_connection.md index 1514ba6181..8491085f9e 100644 --- a/sql-odbc/docs/test/excel_connection.md +++ b/sql-odbc/docs/test/excel_connection.md @@ -12,3 +12,9 @@ There are multiple ways to load data from Elasticsearch in Microsoft Excel. * [ODBC as Data Source](odbc_data_source_connection.md) * [Microsoft Query](microsoft_query_connection.md) * [Query Wizard](query_wizard_connection.md) +* [Data Connection Wizard (Legacy)](data_connection_wizard.md) + +## Use Microsoft Excel with data loaded from Open distro for Elasticsearch + +* [Refresh the data](refresh_data.md) +* [Exporting CSV files](exporting_csv_files.md) diff --git a/sql-odbc/docs/test/exporting_csv_files.md b/sql-odbc/docs/test/exporting_csv_files.md new file mode 100644 index 0000000000..393b3f07d5 --- /dev/null +++ b/sql-odbc/docs/test/exporting_csv_files.md @@ -0,0 +1,22 @@ +## Exporting CSV files in Microsoft Excel + +* Load data from Open Distro For Elasticsearch using any [connection mode](excel_connection.md). + +For this example, [ODBC](odbc_data_source_connection.md) connection with SQL statement `SELECT * FROM kibana_sample_data_flights LIMIT 50` in advanced options is used. + + + +* Click on **File** > **Save As**. +* Select Location to Save file. +* Type the file name into the **File name** field.
+* Set **Save as type** as **CSV UTF-8(Comma delimited)(*.csv)**. + + + +* Click **Save**. +* If you have multiple sheets in the workbook, you will get the following message + + + +* Click on **OK**. +* Data should be exported to selected location in CSV format. diff --git a/sql-odbc/docs/test/img/data_connection_wizard_dsn.png b/sql-odbc/docs/test/img/data_connection_wizard_dsn.png new file mode 100644 index 0000000000..07f60c18bf Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_dsn.png differ diff --git a/sql-odbc/docs/test/img/data_connection_wizard_enable.png b/sql-odbc/docs/test/img/data_connection_wizard_enable.png new file mode 100644 index 0000000000..865f925523 Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_enable.png differ diff --git a/sql-odbc/docs/test/img/data_connection_wizard_load_data.png b/sql-odbc/docs/test/img/data_connection_wizard_load_data.png new file mode 100644 index 0000000000..ebf6b543d8 Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_load_data.png differ diff --git a/sql-odbc/docs/test/img/data_connection_wizard_save_connection_file.png b/sql-odbc/docs/test/img/data_connection_wizard_save_connection_file.png new file mode 100644 index 0000000000..23becca1c6 Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_save_connection_file.png differ diff --git a/sql-odbc/docs/test/img/data_connection_wizard_select.png b/sql-odbc/docs/test/img/data_connection_wizard_select.png new file mode 100644 index 0000000000..43dfe16053 Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_select.png differ diff --git a/sql-odbc/docs/test/img/data_connection_wizard_select_dsn.png b/sql-odbc/docs/test/img/data_connection_wizard_select_dsn.png new file mode 100644 index 0000000000..5143f7d0eb Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_select_dsn.png differ diff --git
a/sql-odbc/docs/test/img/data_connection_wizard_select_worksheet.png b/sql-odbc/docs/test/img/data_connection_wizard_select_worksheet.png new file mode 100644 index 0000000000..bdc3524b0c Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_select_worksheet.png differ diff --git a/sql-odbc/docs/test/img/data_connection_wizard_table_list.png b/sql-odbc/docs/test/img/data_connection_wizard_table_list.png new file mode 100644 index 0000000000..97683afcda Binary files /dev/null and b/sql-odbc/docs/test/img/data_connection_wizard_table_list.png differ diff --git a/sql-odbc/docs/test/img/export_loaded_data.png b/sql-odbc/docs/test/img/export_loaded_data.png new file mode 100644 index 0000000000..568f96f298 Binary files /dev/null and b/sql-odbc/docs/test/img/export_loaded_data.png differ diff --git a/sql-odbc/docs/test/img/export_multiple_sheets.png b/sql-odbc/docs/test/img/export_multiple_sheets.png new file mode 100644 index 0000000000..cb14f1bf09 Binary files /dev/null and b/sql-odbc/docs/test/img/export_multiple_sheets.png differ diff --git a/sql-odbc/docs/test/img/export_save_as_csv.png b/sql-odbc/docs/test/img/export_save_as_csv.png new file mode 100644 index 0000000000..c614d0e6b3 Binary files /dev/null and b/sql-odbc/docs/test/img/export_save_as_csv.png differ diff --git a/sql-odbc/docs/test/img/refresh_add_datarow.png b/sql-odbc/docs/test/img/refresh_add_datarow.png new file mode 100644 index 0000000000..6f2664d4fe Binary files /dev/null and b/sql-odbc/docs/test/img/refresh_add_datarow.png differ diff --git a/sql-odbc/docs/test/img/refresh_data_preview.png b/sql-odbc/docs/test/img/refresh_data_preview.png new file mode 100644 index 0000000000..d2f6c2e7b0 Binary files /dev/null and b/sql-odbc/docs/test/img/refresh_data_preview.png differ diff --git a/sql-odbc/docs/test/img/refresh_load_data.png b/sql-odbc/docs/test/img/refresh_load_data.png new file mode 100644 index 0000000000..fd1078d29a Binary files /dev/null and 
b/sql-odbc/docs/test/img/refresh_load_data.png differ diff --git a/sql-odbc/docs/test/img/refresh_updated_data.png b/sql-odbc/docs/test/img/refresh_updated_data.png new file mode 100644 index 0000000000..6f1800753d Binary files /dev/null and b/sql-odbc/docs/test/img/refresh_updated_data.png differ diff --git a/sql-odbc/docs/test/microsoft_excel_manual_test_plan.xlsx b/sql-odbc/docs/test/microsoft_excel_manual_test_plan.xlsx new file mode 100644 index 0000000000..76af9295f1 Binary files /dev/null and b/sql-odbc/docs/test/microsoft_excel_manual_test_plan.xlsx differ diff --git a/sql-odbc/docs/test/microsoft_query_connection.md b/sql-odbc/docs/test/microsoft_query_connection.md index a3deaea431..c7b7c54b00 100644 --- a/sql-odbc/docs/test/microsoft_query_connection.md +++ b/sql-odbc/docs/test/microsoft_query_connection.md @@ -10,17 +10,11 @@ -* Click on **Options** in Add Table window. - - - -* Click **Refresh**. Select checkbox **Tables**. Clear all other checkboxes. Click on **OK**. - - - -* Select tables and click on **Add**. After all required tables are added, click on **Close**. +* Select table and click on **Add**. + +* Click on **Close**. * Double click on required columns. You can double-click the asterisk (*) to select all the columns from a table. diff --git a/sql-odbc/docs/test/query_wizard_connection.md b/sql-odbc/docs/test/query_wizard_connection.md index 4dab086dae..3e22e82d01 100644 --- a/sql-odbc/docs/test/query_wizard_connection.md +++ b/sql-odbc/docs/test/query_wizard_connection.md @@ -10,18 +10,6 @@ -* You might get an popup with a message `This data source contains no visible tables`. Click on **OK**. - - - -* Click on **Options** in Query Wizard window. - - - -* Select checkbox **Tables**. Clear all other checkboxes. Click on **OK**. - - - * You will see list of available tables & columns. Select required tables/columns and click on **>**. * After selecting all required columns, Click on **Next**. 
diff --git a/sql-odbc/docs/test/refresh_data.md b/sql-odbc/docs/test/refresh_data.md new file mode 100644 index 0000000000..3ddc1b4e41 --- /dev/null +++ b/sql-odbc/docs/test/refresh_data.md @@ -0,0 +1,19 @@ +## Refresh the loaded data + +* Load data from Open Distro For Elasticsearch using any [connection mode](excel_connection.md). + +For this example, [ODBC](odbc_data_source_connection.md) connection is used with the table `sample_data_for_excel` which has 3 datarows as shown in the picture. + + + + + +* Add a datarow in the table `sample_data_for_excel` using Kibana. + + + +* Click on **Data** > **Refresh All** > **Refresh**. + + + +* Verify the data is updated. \ No newline at end of file diff --git a/sql-odbc/docs/user/img/excel_advanced_option.png b/sql-odbc/docs/user/img/excel_advanced_option.png new file mode 100644 index 0000000000..6e93af330f Binary files /dev/null and b/sql-odbc/docs/user/img/excel_advanced_option.png differ diff --git a/sql-odbc/docs/user/img/excel_auth.png b/sql-odbc/docs/user/img/excel_auth.png new file mode 100644 index 0000000000..c05dcee094 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_auth.png differ diff --git a/sql-odbc/docs/user/img/excel_credentials.png b/sql-odbc/docs/user/img/excel_credentials.png new file mode 100644 index 0000000000..ad52f7d8fe Binary files /dev/null and b/sql-odbc/docs/user/img/excel_credentials.png differ diff --git a/sql-odbc/docs/user/img/excel_data_load.png b/sql-odbc/docs/user/img/excel_data_load.png new file mode 100644 index 0000000000..f794aaa2de Binary files /dev/null and b/sql-odbc/docs/user/img/excel_data_load.png differ diff --git a/sql-odbc/docs/user/img/excel_data_preview.png b/sql-odbc/docs/user/img/excel_data_preview.png new file mode 100644 index 0000000000..8ca234f039 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_data_preview.png differ diff --git a/sql-odbc/docs/user/img/excel_data_preview_error.png b/sql-odbc/docs/user/img/excel_data_preview_error.png new
file mode 100644 index 0000000000..84080fbcfb Binary files /dev/null and b/sql-odbc/docs/user/img/excel_data_preview_error.png differ diff --git a/sql-odbc/docs/user/img/excel_data_preview_mac.png b/sql-odbc/docs/user/img/excel_data_preview_mac.png new file mode 100644 index 0000000000..7d822dee93 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_data_preview_mac.png differ diff --git a/sql-odbc/docs/user/img/excel_export.png b/sql-odbc/docs/user/img/excel_export.png new file mode 100644 index 0000000000..4951bd09e2 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_export.png differ diff --git a/sql-odbc/docs/user/img/excel_export_csv.png b/sql-odbc/docs/user/img/excel_export_csv.png new file mode 100644 index 0000000000..2fa3afa89b Binary files /dev/null and b/sql-odbc/docs/user/img/excel_export_csv.png differ diff --git a/sql-odbc/docs/user/img/excel_iodbc_auth.png b/sql-odbc/docs/user/img/excel_iodbc_auth.png new file mode 100644 index 0000000000..a7d5a65400 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_iodbc_auth.png differ diff --git a/sql-odbc/docs/user/img/excel_iodbc_dsn.png b/sql-odbc/docs/user/img/excel_iodbc_dsn.png new file mode 100644 index 0000000000..d175005f1c Binary files /dev/null and b/sql-odbc/docs/user/img/excel_iodbc_dsn.png differ diff --git a/sql-odbc/docs/user/img/excel_iodbc_test_dsn.png b/sql-odbc/docs/user/img/excel_iodbc_test_dsn.png new file mode 100644 index 0000000000..557f11aea5 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_iodbc_test_dsn.png differ diff --git a/sql-odbc/docs/user/img/excel_load_data.png b/sql-odbc/docs/user/img/excel_load_data.png new file mode 100644 index 0000000000..3546e3769c Binary files /dev/null and b/sql-odbc/docs/user/img/excel_load_data.png differ diff --git a/sql-odbc/docs/user/img/excel_refresh.png b/sql-odbc/docs/user/img/excel_refresh.png new file mode 100644 index 0000000000..e69920b783 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_refresh.png 
differ diff --git a/sql-odbc/docs/user/img/excel_refresh_mac.png b/sql-odbc/docs/user/img/excel_refresh_mac.png new file mode 100644 index 0000000000..d8b3aa8f9e Binary files /dev/null and b/sql-odbc/docs/user/img/excel_refresh_mac.png differ diff --git a/sql-odbc/docs/user/img/excel_select_database.png b/sql-odbc/docs/user/img/excel_select_database.png new file mode 100644 index 0000000000..59e886c8fb Binary files /dev/null and b/sql-odbc/docs/user/img/excel_select_database.png differ diff --git a/sql-odbc/docs/user/img/excel_select_dsn.png b/sql-odbc/docs/user/img/excel_select_dsn.png new file mode 100644 index 0000000000..b159a47474 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_select_dsn.png differ diff --git a/sql-odbc/docs/user/img/excel_select_dsn_mac.png b/sql-odbc/docs/user/img/excel_select_dsn_mac.png new file mode 100644 index 0000000000..0cb22f2b34 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_select_dsn_mac.png differ diff --git a/sql-odbc/docs/user/img/excel_select_odbc.png b/sql-odbc/docs/user/img/excel_select_odbc.png new file mode 100644 index 0000000000..123e037eb6 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_select_odbc.png differ diff --git a/sql-odbc/docs/user/img/excel_select_worksheet.png b/sql-odbc/docs/user/img/excel_select_worksheet.png new file mode 100644 index 0000000000..825cbecbfd Binary files /dev/null and b/sql-odbc/docs/user/img/excel_select_worksheet.png differ diff --git a/sql-odbc/docs/user/img/excel_table_list.png b/sql-odbc/docs/user/img/excel_table_list.png new file mode 100644 index 0000000000..cbd01583b7 Binary files /dev/null and b/sql-odbc/docs/user/img/excel_table_list.png differ diff --git a/sql-odbc/docs/user/img/pbi_auth.png b/sql-odbc/docs/user/img/pbi_auth.png new file mode 100644 index 0000000000..0795d63d95 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_auth.png differ diff --git a/sql-odbc/docs/user/img/pbi_connection_string_options.png 
b/sql-odbc/docs/user/img/pbi_connection_string_options.png new file mode 100644 index 0000000000..57b763a856 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_connection_string_options.png differ diff --git a/sql-odbc/docs/user/img/pbi_data_preview.png b/sql-odbc/docs/user/img/pbi_data_preview.png new file mode 100644 index 0000000000..b26bcee6be Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_data_preview.png differ diff --git a/sql-odbc/docs/user/img/pbi_error_conn.png b/sql-odbc/docs/user/img/pbi_error_conn.png new file mode 100644 index 0000000000..87f908a397 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_error_conn.png differ diff --git a/sql-odbc/docs/user/img/pbi_error_driver_not_installed.png b/sql-odbc/docs/user/img/pbi_error_driver_not_installed.png new file mode 100644 index 0000000000..b5059bc0b4 Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_error_driver_not_installed.png differ diff --git a/sql-odbc/docs/user/img/pbi_select_connector.png b/sql-odbc/docs/user/img/pbi_select_connector.png new file mode 100644 index 0000000000..c9d3ec8c4d Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_select_connector.png differ diff --git a/sql-odbc/docs/user/img/pbi_settings.png b/sql-odbc/docs/user/img/pbi_settings.png new file mode 100644 index 0000000000..0f1dcac98b Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_settings.png differ diff --git a/sql-odbc/docs/user/img/pbi_third_party_warning.png b/sql-odbc/docs/user/img/pbi_third_party_warning.png new file mode 100644 index 0000000000..792c110b8c Binary files /dev/null and b/sql-odbc/docs/user/img/pbi_third_party_warning.png differ diff --git a/sql-odbc/docs/user/img/tableau_columns_list.png b/sql-odbc/docs/user/img/tableau_columns_list.png new file mode 100644 index 0000000000..967ba224ca Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_columns_list.png differ diff --git a/sql-odbc/docs/user/img/tableau_data_preview.png 
b/sql-odbc/docs/user/img/tableau_data_preview.png new file mode 100644 index 0000000000..6c03338f2d Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_data_preview.png differ diff --git a/sql-odbc/docs/user/img/tableau_dialog.png b/sql-odbc/docs/user/img/tableau_dialog.png new file mode 100644 index 0000000000..473291bd8a Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_dialog.png differ diff --git a/sql-odbc/docs/user/img/tableau_download_taco.png b/sql-odbc/docs/user/img/tableau_download_taco.png new file mode 100644 index 0000000000..ec84d503b4 Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_download_taco.png differ diff --git a/sql-odbc/docs/user/img/tableau_graph.png b/sql-odbc/docs/user/img/tableau_graph.png new file mode 100644 index 0000000000..3d0cdce994 Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_graph.png differ diff --git a/sql-odbc/docs/user/img/tableau_select_connector.png b/sql-odbc/docs/user/img/tableau_select_connector.png new file mode 100644 index 0000000000..b3a4d83ad5 Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_select_connector.png differ diff --git a/sql-odbc/docs/user/img/tableau_select_table.png b/sql-odbc/docs/user/img/tableau_select_table.png new file mode 100644 index 0000000000..0e79347304 Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_select_table.png differ diff --git a/sql-odbc/docs/user/img/tableau_table_list.png b/sql-odbc/docs/user/img/tableau_table_list.png new file mode 100644 index 0000000000..104b4becdd Binary files /dev/null and b/sql-odbc/docs/user/img/tableau_table_list.png differ diff --git a/sql-odbc/docs/user/img/test_dsn_configure.png b/sql-odbc/docs/user/img/test_dsn_configure.png new file mode 100644 index 0000000000..2a21e60179 Binary files /dev/null and b/sql-odbc/docs/user/img/test_dsn_configure.png differ diff --git a/sql-odbc/docs/user/img/test_dsn_success.png b/sql-odbc/docs/user/img/test_dsn_success.png new file mode 100644 index 
0000000000..98085c0304 Binary files /dev/null and b/sql-odbc/docs/user/img/test_dsn_success.png differ diff --git a/sql-odbc/docs/user/mac_configure_dsn.md b/sql-odbc/docs/user/mac_configure_dsn.md index 0ec3115893..303ad23640 100644 --- a/sql-odbc/docs/user/mac_configure_dsn.md +++ b/sql-odbc/docs/user/mac_configure_dsn.md @@ -12,8 +12,8 @@ Note: In order to use the Open Distro for Elasticsearch SQL ODBC driver with the 1. Go to the **ODBC Drivers** tab. 2. Click **Add a Driver**. * **Description of the Driver**: The driver name used for ODBC connections (ex. `ODFE SQL ODBC Driver`) - * **Driver File Name**: The path to the driver file (default installed path: `/usr/local/lib/odfesqlodbc/bin/libodfesqlodbc.dylib`) - * **Setup File Name**: The path to the driver file (default installed path: `/usr/local/lib/odfesqlodbc/bin/libodfesqlodbc.dylib`) + * **Driver File Name**: The path to the driver file (default installed path: `/Library/ODBC/odfesqlodbc/bin/libodfesqlodbc.dylib`) + * **Setup File Name**: The path to the driver file (default installed path: `/Library/ODBC/odfesqlodbc/bin/libodfesqlodbc.dylib`) * Set as a **User** driver * Below is a screenshot of how the final screen should look. 3. Click **OK** to save the options. diff --git a/sql-odbc/docs/user/microsoft_excel_support.md b/sql-odbc/docs/user/microsoft_excel_support.md new file mode 100644 index 0000000000..158e2d7543 --- /dev/null +++ b/sql-odbc/docs/user/microsoft_excel_support.md @@ -0,0 +1,88 @@ +# Connecting Open Distro For ElasticSearch to Microsoft Excel on Windows + +## Prerequisites + +* Microsoft Excel 2016 and higher +* [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) +* [Open Distro for Elasticsearch SQL ODBC driver](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/odbc/) +* A preconfigured [User or System DSN](../../README.md) + +## Test Successful Connection + +* Open **ODBC Data Source Administrator**. 
+* Click on **System DSN**. +* Select **ODFE SQL ODBC DSN**. +* Click on **Configure**. + + + +* Modify connection attributes accordingly. +* Click on **Test**. + + + +* You should get a message as **Connection Successful**. + +## Load data + +* Open blank workbook in Microsoft Excel. +* Click on **Data** > **Get Data** > **From Other Sources** > **From ODBC** + + + +* Select **ODFE SQL ODBC DSN**. Click **OK**. + + + +* Select **Default or Custom** in connection credentials windows and click on **Connect**. + + + +* Select a table from list to load data preview. Click on **Load**. + + + +* Data will be loaded in the spreadsheet. + + + +**NOTE**: There are multiple ways to load data in Microsoft Excel. Alternate options are [Data Connection Wizard](../test/data_connection_wizard.md), [Microsoft Query Connection](../test/microsoft_query_connection.md) and [Query Wizard Connection](../test/query_wizard_connection.md). These connection modes will load data relatively faster. + +## Refresh Data + +To refresh the data click on **Query** > **Refresh**. + + + +Alternately, **Data** > **Refresh** option can also be used to refresh the data. + +## Export as CSV files + +* Click on **File** > **Save As**. +* Select Location to Save file. +* Type the file name. +* Set type as **CSV UTF-8(Comma delimited)(*.csv)**. + + + +* Click **Save**. +* Data will be exported to selected location in CSV format. + +## Troubleshooting + +* If the table has large number of datarows, increase [the keepalive](https://github.com/opendistro-for-elasticsearch/sql/blob/master/docs/dev/Pagination.md#opendistrosqlcursorkeep_alive) value accordlingly. + +* If the table has nested or object type column, you might get an error as below. + + + +If you ignore the error and try to load the data, column name and values might not match. + +In this case, please use advanced options while connecting to the data source. + + + +Also, make sure query doesn't include the name of nested or object type column name. 
For example, `SELECT products FROM kibana_sample_data_ecommerce` where product is nested type column might have data loss. + +This issue will be resolved when [Object field is missing in SELECT result](https://github.com/opendistro-for-elasticsearch/sql/issues/564) is fixed. + diff --git a/sql-odbc/docs/user/microsoft_excel_support_mac.md b/sql-odbc/docs/user/microsoft_excel_support_mac.md new file mode 100644 index 0000000000..df061429ac --- /dev/null +++ b/sql-odbc/docs/user/microsoft_excel_support_mac.md @@ -0,0 +1,90 @@ +# Connecting Open Distro For ElasticSearch to Micosoft Excel on Mac OS + +## Prerequisites + +* Microsoft Excel 2016 and higher +* [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) +* [Open Distro for Elasticsearch SQL ODBC driver](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/odbc/) +* A preconfigured [User or System DSN](mac_configure_dsn.md) + +## Test Successful Connection + +* Open **iODBC Data Source Administrator** using command + +`sudo /Applications/iODBC/iODBC\ Administrator64.app/Contents/MacOS/iODBC\ Administrator64` + +* Click on **System DSN**. +* Select **ODFE SQL ODBC DSN**. +* Click on **Configure**. + + + +* Modify connection attributes accordingly. +* Click on **Ok**. +* Click on **Test**. +* Enter credentials if required and Click **Ok**. + + + +* You should get a message as `The connection DSN was tested successfully, and can be used at this time.` + + + +## Load data + +* Open blank workbook in Microsoft Excel. +* Click on **Data** > **New Database Query** > **From Database**. + + + +* Select **ODFE SQL ODBC DSN**. Click **OK**. + + + +* Enter credentials if required and click on **OK**. + + + +* Select a table from the list. + + + +* Edit SQL statement if required and click on **Run**. Data preview will be loaded. 
+> **NOTE**: Excel for Mac currently will not show an error if you provide an incorrect query ([Github issue here](https://github.com/opendistro-for-elasticsearch/sql/issues/601)). If no data appears, check your ODFE server logs to see if the query failed. + + + +* Click on **Return Data**. Select sheet and click OK. + + + +* Data will be loaded in the spreadsheet. + + + +## Refresh Data + +To refresh the data click on **Table** > **Refresh**. + + + +Alternately, **Data** > **Refresh** option can also be used to refresh the data. + +## Export as CSV files + +* Click on **File** > **Save As**. +* Type the file name. +* Select Location to Save file. +* Set File Format to **CSV UTF-8(Comma delimited)(*.csv)**. + + + +* Click **Save**. +* Data will be exported to selected location in CSV format. + +## Troubleshooting + +* If the table has large number of datarows, increase [the keepalive](https://github.com/opendistro-for-elasticsearch/sql/blob/master/docs/dev/Pagination.md#opendistrosqlcursorkeep_alive) value accordlingly. + +* You might need to remove `;` from SQL statement to load data preview. + diff --git a/sql-odbc/docs/user/power_bi_support.md b/sql-odbc/docs/user/power_bi_support.md new file mode 100644 index 0000000000..3e5a6f2c4f --- /dev/null +++ b/sql-odbc/docs/user/power_bi_support.md @@ -0,0 +1,58 @@ +# Connecting Open Distro For ElasticSearch to Microsoft Power BI Desktop + +**NOTE**: **The connector is under development. All connection options are not available yet. 
There could be issues while loading data** + +## Prerequisites +* Microsoft Power BI Desktop +* [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) +* [Open Distro for Elasticsearch SQL ODBC driver](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/odbc/) +* [OdfeSqlOdbcPBIConnector.mez](../../src/PowerBIConnector/bin/Release/) + +## Setup +* Copy `OdfeSqlOdbcPBIConnector.mez` file in the `\Documents\Power BI Desktop\Custom Connectors\` folder. This will let Power BI access custom connector. +* Open Power BI Desktop. +* Change the security settings. Click on **Files** > **Options and settings** > **Options** > **Security** > Select **Allow any extension to load without validation or warning** for Data Extensions. This will allow the custom connector to load data into Power BI. + + + +* Restart Power BI Desktop. + +## Load Data + +* Open Power BI Desktop. +* Click on **Home** > **Get Data** > **More** > **Other**. Select **Open Distro For Elasticsearch (Beta)**. Click on **Connect**. + + + +* You will get a warning for using third-party service. Click on **Continue**. + + + +* Enter host and port values. Click on **OK**. + + + +* Select authentication option. Enter credentials if required and click on **Connect**. + + + +* Select required table. Data preview will be loaded. + + + +* Click on **Load**. + +## Troubleshooting + +* If you get an following error, please install [Open Distro For Elasticsearch SQL ODBC Driver](https://opendistro.github.io/for-elasticsearch-docs/docs/sql/odbc/). + + + +* If you get an following error, + + + +1. Check if host and port values are correct. +2. Check if auth credentials are correct. +3. Check if server is running. 
+ diff --git a/sql-odbc/docs/user/tableau_support.md b/sql-odbc/docs/user/tableau_support.md new file mode 100644 index 0000000000..6f4bb4ecef --- /dev/null +++ b/sql-odbc/docs/user/tableau_support.md @@ -0,0 +1,68 @@ +# Connecting Open Distro For ElasticSearch to Tableau Desktop via the Tableau Connector + +## Overview + +Connect an Open Distro For ElasticSearch data source to Tableau Desktop via the Tableau Connector to create a basic graph. + +## Prerequisites + +* Download and Install [Tableau Desktop](https://www.tableau.com/products/desktop/download) 2020 and higher +* Install and Configure [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) +* Download and Install [Open Distro for Elasticsearch SQL ODBC driver](../../README.md) +* Download Tableau Connector for `SQL by Open Distro for ES` ([odfe_sql_odbc.taco](../../src/TableauConnector/odfe_sql_odbc/odfe_sql_odbc.taco)). +Click on **Download** option for downloading `odfe_sql_odbc.taco` file. + + +## Prepare data + +* Copy `odfe_sql_odbc.taco` file to My Tableau Repository. + + * On windows: **%User%/Documents/My Tableau Repository/Connectors/**. + * On Mac: **~/Documents/My Tableau Repository/Connectors/**. + +* Open Tableau using following command + +``` +\bin\tableau.exe -DDisableVerifyConnectorPluginSignature=true +``` + +* Click on **Connect** > **More** > **SQL by Open Distro for ES**. + + + +* Enter **Server** & **Port** value. +* Select required authentication option. For **AWS_SIGV4** authentication, select **Integrated Authentication** and enter value for **Region**. +* Use **Additional Options** section for specifying options like **FetchSize**, **ResponseTimeout**. Use `;` to separate values. For example, + +``` +FetchSize=2000;ResponseTimeout=20; +``` + + + +* Click on **Sign In**. +* You will get a list of tables when the connection is successful. 
+ + + +## Analyze Data + +To generate a graph, + +* Double click on any required table from the list and click on **Update Now** to load data preview. + + + +* Data preview will be loaded. + + + +* Click on **Sheet 1**. You can see a list of attributes under section **Data**. + + + +* Double click on any required attributes to generate a simple graph. + + + +* You can change visualizations by selecting any active visualization from **Show Me**. \ No newline at end of file diff --git a/sql-odbc/src/CMakeLists.txt b/sql-odbc/src/CMakeLists.txt index d1ebcbcbfd..b8b663f016 100644 --- a/sql-odbc/src/CMakeLists.txt +++ b/sql-odbc/src/CMakeLists.txt @@ -78,8 +78,8 @@ set(INSTALL_SRC "${CMAKE_CURRENT_SOURCE_DIR}/installer") set(DSN_INSTALLER_SRC "${CMAKE_CURRENT_SOURCE_DIR}/DSNInstaller") # ODBC Driver version -set(DRIVER_PACKAGE_VERSION "1.9.0.0") -set(DRIVER_PACKAGE_VERSION_COMMA_SEPARATED "1,9,0,0") +set(DRIVER_PACKAGE_VERSION "1.9.0.1") +set(DRIVER_PACKAGE_VERSION_COMMA_SEPARATED "1,9,0,1") add_compile_definitions( ES_ODBC_VERSION="${DRIVER_PACKAGE_VERSION}" # Comma separated version is required for odbc administrator's driver file. 
ES_ODBC_DRVFILE_VERSION=${DRIVER_PACKAGE_VERSION_COMMA_SEPARATED} ) diff --git a/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp b/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp index 217bc42d9e..8924bdb119 100644 --- a/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp +++ b/sql-odbc/src/IntegrationTests/ITODBCCatalog/test_odbc_catalog.cpp @@ -71,19 +71,26 @@ typedef struct bind_info { // Column test constants and macro const std::vector< std::string > flights_column_name = { - "FlightNum", "Origin", "OriginLocation", "DestLocation", - "FlightDelay", "DistanceMiles", "FlightTimeMin", "OriginWeather", - "dayOfWeek", "AvgTicketPrice", "Carrier", "FlightDelayMin", - "OriginRegion", "DestAirportID", "FlightDelayType", "timestamp", - "Dest", "FlightTimeHour", "Cancelled", "DistanceKilometers", - "OriginCityName", "DestWeather", "OriginCountry", "DestCountry", - "DestRegion", "DestCityName", "OriginAirportID"}; + "FlightNum", "Origin", "FlightDelay", + "DistanceMiles", "FlightTimeMin", "OriginWeather", + "dayOfWeek", "AvgTicketPrice", "Carrier", + "FlightDelayMin", "OriginRegion", "DestAirportID", + "FlightDelayType", "timestamp", "Dest", + "FlightTimeHour", "Cancelled", "DistanceKilometers", + "OriginCityName", "DestWeather", "OriginCountry", + "DestCountry", "DestRegion", "DestCityName", + "OriginAirportID"}; const std::vector< std::string > flights_data_type = { - "keyword", "keyword", "geo_point", "geo_point", "boolean", "float", - "float", "keyword", "integer", "float", "keyword", "integer", - "keyword", "keyword", "keyword", "date", "keyword", "keyword", - "boolean", "float", "keyword", "keyword", "keyword", "keyword", - "keyword", "keyword", "keyword"}; + "keyword", "keyword", "boolean", "float", "float", "keyword", "integer", + "float", "keyword", "integer", "keyword", "keyword", "keyword", "date", + "keyword", "keyword", "boolean", "float", "keyword", "keyword", "keyword", + "keyword", "keyword", "keyword", 
"keyword"}; +const std::vector< short > flights_sql_data_type = { + SQL_WVARCHAR, SQL_WVARCHAR, SQL_BIT, SQL_REAL, SQL_REAL, + SQL_WVARCHAR, SQL_INTEGER, SQL_REAL, SQL_WVARCHAR, SQL_INTEGER, + SQL_WVARCHAR, SQL_WVARCHAR, SQL_WVARCHAR, SQL_TYPE_TIMESTAMP, SQL_WVARCHAR, + SQL_WVARCHAR, SQL_BIT, SQL_REAL, SQL_WVARCHAR, SQL_WVARCHAR, + SQL_WVARCHAR, SQL_WVARCHAR, SQL_WVARCHAR, SQL_WVARCHAR, SQL_WVARCHAR}; const std::string flights_catalog_odfe = "odfe-cluster"; const std::string flights_catalog_elas = "elasticsearch"; const std::string flights_table_name = "kibana_sample_data_flights"; @@ -157,9 +164,6 @@ const std::vector< table_data > excel_table_data_all{ }; const std::vector< table_data > table_data_types{ {"", "", "", "BASE TABLE", ""}}; -const std::vector< table_data > table_data_schemas{{"", "", "", "", ""}}; -const std::vector< table_data > table_data_catalogs{ - {"odfe-cluster", "", "", "", ""}}; class TestSQLTables : public testing::Test { public: @@ -229,9 +233,10 @@ void CheckTableData(SQLHSTMT m_hstmt, TEST_F(TestSQLTables, test_name) { \ EXPECT_EQ(SQL_SUCCESS, SQLSetStmtAttr(m_hstmt, SQL_ATTR_METADATA_ID, \ (void*)(!enable_pattern), 0)); \ - EXPECT_TRUE(SQL_SUCCEEDED(SQLTables(m_hstmt, catalog, SQL_NTS, schema, \ + SQLRETURN ret2 = SQLTables(m_hstmt, catalog, SQL_NTS, schema, \ SQL_NTS, table, SQL_NTS, \ - table_type, SQL_NTS))); \ + table_type, SQL_NTS); \ + LogAnyDiagnostics(SQL_HANDLE_STMT, m_hstmt, ret2); \ if (empty) { \ size_t result_count = 0; \ SQLRETURN ret; \ @@ -280,13 +285,21 @@ class TestSQLCatalogKeys : public testing::Test { // NULL test TEST_SQL_TABLES(Null, NULL, NULL, NULL, NULL, table_data_all, true, false); -// Catalog tests -TEST_SQL_TABLES(WildCatalogs, (SQLTCHAR*)L"%", (SQLTCHAR*)L"", (SQLTCHAR*)L"", - NULL, table_data_catalogs, false, false) +// Catalog tests (error: catalogs not supported) +TEST_F(TestSQLTables, WildCatalogs) { + SQLRETURN ret = SQLTables(m_hstmt, (SQLTCHAR*)L"%", SQL_NTS, (SQLTCHAR*)L"", + SQL_NTS, 
(SQLTCHAR*)L"", SQL_NTS, NULL, SQL_NTS); + EXPECT_EQ(ret, SQL_ERROR); + LogAnyDiagnostics(SQL_HANDLE_STMT, m_hstmt, ret); +} -// Schema tests -TEST_SQL_TABLES(WildSchema, (SQLTCHAR*)L"", (SQLTCHAR*)L"%", (SQLTCHAR*)L"", - NULL, table_data_schemas, false, false) +// Schema tests (error: schemas not supported) +TEST_F(TestSQLTables, WildSchema) { + SQLRETURN ret = SQLTables(m_hstmt, (SQLTCHAR*)L"", SQL_NTS, (SQLTCHAR*)L"%", + SQL_NTS, (SQLTCHAR*)L"", SQL_NTS, NULL, SQL_NTS); + EXPECT_EQ(ret, SQL_ERROR); + LogAnyDiagnostics(SQL_HANDLE_STMT, m_hstmt, ret); +} // Table tests TEST_SQL_TABLES(ValidTable, NULL, NULL, (SQLTCHAR*)L"kibana_sample_data%", NULL, @@ -348,15 +361,16 @@ TEST_F(TestSQLColumns, FlightsValidation) { binds.push_back(bind_info(2, SQL_C_CHAR)); binds.push_back(bind_info(3, SQL_C_CHAR)); binds.push_back(bind_info(4, SQL_C_CHAR)); - binds.push_back(bind_info(5, SQL_C_SSHORT)); + binds.push_back(bind_info(5, SQL_C_SHORT)); binds.push_back(bind_info(6, SQL_C_CHAR)); + binds.push_back(bind_info(7, SQL_C_SLONG)); binds.push_back(bind_info(8, SQL_C_SLONG)); binds.push_back(bind_info(9, SQL_C_SSHORT)); binds.push_back(bind_info(10, SQL_C_SSHORT)); binds.push_back(bind_info(11, SQL_C_SSHORT)); binds.push_back(bind_info(12, SQL_C_CHAR)); binds.push_back(bind_info(13, SQL_C_CHAR)); - binds.push_back(bind_info(14, SQL_C_SSHORT)); + binds.push_back(bind_info(14, SQL_C_SHORT)); binds.push_back(bind_info(15, SQL_C_SSHORT)); binds.push_back(bind_info(16, SQL_C_SLONG)); binds.push_back(bind_info(17, SQL_C_SLONG)); @@ -375,8 +389,7 @@ TEST_F(TestSQLColumns, FlightsValidation) { ordinal++; switch (ordinal) { case 1: - EXPECT_TRUE((it.AsString() == flights_catalog_elas) - || (it.AsString() == flights_catalog_odfe)); + EXPECT_EQ(it.AsString(), ""); break; case 3: EXPECT_EQ(it.AsString(), flights_table_name); @@ -384,16 +397,26 @@ TEST_F(TestSQLColumns, FlightsValidation) { case 4: EXPECT_EQ(it.AsString(), flights_column_name[column_idx]); break; + case 5: + EXPECT_EQ( + 
it.AsString(), + std::to_string(flights_sql_data_type[column_idx])); + break; case 6: EXPECT_EQ(it.AsString(), flights_data_type[column_idx]); break; - case 9: + case 10: EXPECT_EQ(it.AsString(), flights_decimal_digits); break; - case 10: + case 11: EXPECT_EQ(it.AsString(), flights_num_prec_radix); break; - case 16: + case 14: + EXPECT_EQ( + it.AsString(), + std::to_string(flights_sql_data_type[column_idx])); + break; + case 17: EXPECT_EQ(it.AsString(), std::to_string(column_idx + 1)); break; default: @@ -404,7 +427,7 @@ TEST_F(TestSQLColumns, FlightsValidation) { } column_idx++; } - EXPECT_EQ(column_idx, static_cast< size_t >(27)); + EXPECT_EQ(column_idx, static_cast< size_t >(25)); } // We expect an empty result set for PrimaryKeys and ForeignKeys diff --git a/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp b/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp index c194d2a24c..4f2e43b8cf 100644 --- a/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp +++ b/sql-odbc/src/IntegrationTests/ITODBCInfo/test_odbc_info.cpp @@ -137,17 +137,23 @@ int Ver1GEVer2(std::wstring ver_1_str, std::wstring ver_2_str) { // Driver Info // ///////////////// +#if defined(WIN32) TEST_SQL_GET_INFO_STRING(SQLDriverName, SQL_DRIVER_NAME, L"odfesqlodbc.dll"); +#elif defined(__APPLE__) +TEST_SQL_GET_INFO_STRING(SQLDriverName, SQL_DRIVER_NAME, L"libodfesqlodbc.dylib"); +#endif TEST_SQL_GET_INFO_STRING(SQLDriverODBCVer, SQL_DRIVER_ODBC_VER, L"03.51"); -std::wstring version = std::wstring_convert< std::codecvt_utf8_utf16< wchar_t >, wchar_t >{} +std::wstring version = + std::wstring_convert< std::codecvt_utf8_utf16< wchar_t >, wchar_t >{} .from_bytes(ELASTICSEARCHDRIVERVERSION); TEST_SQL_GET_INFO_STRING(SQLDriverVer, SQL_DRIVER_VER, version); TEST_SQL_GET_INFO_UINT16(SQLGetDataExtensions, SQL_GETDATA_EXTENSIONS, (SQL_GD_ANY_COLUMN | SQL_GD_ANY_ORDER | SQL_GD_BOUND | SQL_GD_BLOCK)); -TEST_SQL_GET_INFO_STRING(SQLSearchPatternEscape, SQL_SEARCH_PATTERN_ESCAPE, L""); 
+TEST_SQL_GET_INFO_STRING(SQLSearchPatternEscape, SQL_SEARCH_PATTERN_ESCAPE, + L""); ////////////////////// // Data Source Info // @@ -158,8 +164,8 @@ TEST_SQL_GET_INFO_UINT16(SQLCursorCommitBehavior, SQL_CURSOR_COMMIT_BEHAVIOR, TEST_SQL_GET_INFO_UINT16(SQLTxnCapable, SQL_TXN_CAPABLE, SQL_TC_NONE); TEST_SQL_GET_INFO_UINT16(SQLConcatNullBehavior, SQL_CONCAT_NULL_BEHAVIOR, SQL_CB_NULL); -TEST_SQL_GET_INFO_STRING(SQLSchemaTerm, SQL_SCHEMA_TERM, L"schema"); -TEST_SQL_GET_INFO_STRING(SQLCatalogTerm, SQL_CATALOG_TERM, L"catalog"); +TEST_SQL_GET_INFO_STRING(SQLSchemaTerm, SQL_SCHEMA_TERM, L""); +TEST_SQL_GET_INFO_STRING(SQLCatalogTerm, SQL_CATALOG_TERM, L""); /////////////// // DBMS Info // @@ -173,7 +179,8 @@ TEST_SQL_GET_INFO_VERSION_GE(SQLDBMSVer, SQL_DBMS_VER, L"7.1.1"); /////////////////// TEST_SQL_GET_INFO_STRING(SQLColumnAlias, SQL_COLUMN_ALIAS, L"Y"); -TEST_SQL_GET_INFO_UINT16(SQLGroupBy, SQL_GROUP_BY, SQL_GB_GROUP_BY_EQUALS_SELECT); +TEST_SQL_GET_INFO_UINT16(SQLGroupBy, SQL_GROUP_BY, + SQL_GB_GROUP_BY_EQUALS_SELECT); TEST_SQL_GET_INFO_STRING(SQLIdentifierQuoteChar, SQL_IDENTIFIER_QUOTE_CHAR, L"`"); TEST_SQL_GET_INFO_UINT_MASK(SQLOJCapabilities, SQL_OJ_CAPABILITIES, @@ -187,11 +194,10 @@ TEST_SQL_GET_INFO_UINT_MASK(SQLODBCInterfaceConformance, SQL_ODBC_INTERFACE_CONFORMANCE, SQL_OIC_CORE); TEST_SQL_GET_INFO_UINT_MASK(SQLSQLConformance, SQL_SQL_CONFORMANCE, SQL_SC_SQL92_ENTRY); -TEST_SQL_GET_INFO_UINT_MASK(SQLCatalogUsage, SQL_CATALOG_USAGE, - SQL_CU_DML_STATEMENTS); -TEST_SQL_GET_INFO_UINT16(SQLCatalogLocation, SQL_CATALOG_LOCATION, SQL_QL_START); +TEST_SQL_GET_INFO_UINT_MASK(SQLCatalogUsage, SQL_CATALOG_USAGE, 0); +TEST_SQL_GET_INFO_UINT16(SQLCatalogLocation, SQL_CATALOG_LOCATION, 0); TEST_SQL_GET_INFO_STRING(SQLCatalogNameSeparator, SQL_CATALOG_NAME_SEPARATOR, - L"."); + L""); TEST_SQL_GET_INFO_UINT_MASK(SQLSQL92Predicates, SQL_SQL92_PREDICATES, SQL_SP_BETWEEN | SQL_SP_COMPARISON | SQL_SP_IN | SQL_SP_ISNULL | SQL_SP_LIKE); @@ -204,7 +210,8 @@ 
TEST_SQL_GET_INFO_UINT_MASK(SQLSQL92ValueExpressions, SQL_SQL92_VALUE_EXPRESSIONS, SQL_SVE_CASE | SQL_SVE_CAST); TEST_SQL_GET_INFO_UINT_MASK(SQLDatetimeLiterals, SQL_DATETIME_LITERALS, 0); -TEST_SQL_GET_INFO_STRING(SQLOrderByColumnsInSelect, SQL_ORDER_BY_COLUMNS_IN_SELECT, L"Y"); +TEST_SQL_GET_INFO_STRING(SQLOrderByColumnsInSelect, + SQL_ORDER_BY_COLUMNS_IN_SELECT, L"Y"); TEST_SQL_GET_INFO_STRING(SQLCatalogName, SQL_CATALOG_NAME, L"N"); //////////////// @@ -232,11 +239,11 @@ TEST_SQL_GET_INFO_UINT_MASK(SQLConvertVarbinary, SQL_CONVERT_VARBINARY, 0); TEST_SQL_GET_INFO_UINT_MASK(SQLConvertChar, SQL_CONVERT_CHAR, 0); TEST_SQL_GET_INFO_UINT_MASK(SQLConvertLongVarchar, SQL_CONVERT_LONGVARCHAR, 0); TEST_SQL_GET_INFO_UINT_MASK(SQLConvertWChar, SQL_CONVERT_WCHAR, 0); -TEST_SQL_GET_INFO_UINT_MASK(SQLConvertWLongVarchar, SQL_CONVERT_WLONGVARCHAR, 0); +TEST_SQL_GET_INFO_UINT_MASK(SQLConvertWLongVarchar, SQL_CONVERT_WLONGVARCHAR, + 0); TEST_SQL_GET_INFO_UINT_MASK(SQLConvertWVarchar, SQL_CONVERT_WVARCHAR, 0); TEST_SQL_GET_INFO_UINT_MASK(SQLConvertGuid, SQL_CONVERT_GUID, 0); - ////////////////////// // Scalar Functions // ////////////////////// @@ -278,8 +285,10 @@ TEST_SQL_GET_INFO_UINT_MASK(SQLSQL92StringFunctions, SQL_SQL92_STRING_FUNCTIONS, //////////// TEST_SQL_GET_INFO_UINT16(SQLMaxIdentifierLen, SQL_MAX_IDENTIFIER_LEN, SHRT_MAX); -TEST_SQL_GET_INFO_UINT16(SQLMaxColumnsInGroupBy, SQL_MAX_COLUMNS_IN_GROUP_BY, 0); -TEST_SQL_GET_INFO_UINT16(SQLMaxColumnsInOrderBy, SQL_MAX_COLUMNS_IN_ORDER_BY, 0); +TEST_SQL_GET_INFO_UINT16(SQLMaxColumnsInGroupBy, SQL_MAX_COLUMNS_IN_GROUP_BY, + 0); +TEST_SQL_GET_INFO_UINT16(SQLMaxColumnsInOrderBy, SQL_MAX_COLUMNS_IN_ORDER_BY, + 0); TEST_SQL_GET_INFO_UINT16(SQLMaxColumnsInSelect, SQL_MAX_COLUMNS_IN_SELECT, 0); int main(int argc, char** argv) { diff --git a/sql-odbc/src/IntegrationTests/ITODBCResults/test_odbc_results.cpp b/sql-odbc/src/IntegrationTests/ITODBCResults/test_odbc_results.cpp index f46e160343..61aeaf1002 100644 --- 
a/sql-odbc/src/IntegrationTests/ITODBCResults/test_odbc_results.cpp +++ b/sql-odbc/src/IntegrationTests/ITODBCResults/test_odbc_results.cpp @@ -936,6 +936,7 @@ TEST_F(TestSQLDescribeCol, SingleColumnMetadata) { EXPECT_EQ(single_col, m_column_name); EXPECT_EQ(single_col_name_length, m_column_name_length); EXPECT_EQ(single_col_data_type, m_data_type); + // TODO #628 - Investigate why value differs & fix validation accordingly EXPECT_EQ(single_col_column_size, m_column_size); EXPECT_EQ(single_col_decimal_digit, m_decimal_digits); EXPECT_EQ(single_col_nullable, m_nullable); diff --git a/sql-odbc/src/PowerBIConnector/Diagnostics.pqm b/sql-odbc/src/PowerBIConnector/Diagnostics.pqm new file mode 100644 index 0000000000..ca10c3cf98 --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/Diagnostics.pqm @@ -0,0 +1,275 @@ +let + Diagnostics.LogValue = (prefix, value) => Diagnostics.Trace(TraceLevel.Information, prefix & ": " & (try Diagnostics.ValueToText(value) otherwise ""), value), + Diagnostics.LogValue2 = (prefix, value, result, optional delayed) => Diagnostics.Trace(TraceLevel.Information, prefix & ": " & Diagnostics.ValueToText(value), result, delayed), + Diagnostics.LogFailure = (text, function) => + let + result = try function() + in + if result[HasError] then Diagnostics.LogValue2(text, result[Error], () => error result[Error], true) else result[Value], + + Diagnostics.WrapFunctionResult = (innerFunction as function, outerFunction as function) as function => + Function.From(Value.Type(innerFunction), (list) => outerFunction(() => Function.Invoke(innerFunction, list))), + + Diagnostics.WrapHandlers = (handlers as record) as record => + Record.FromList( + List.Transform( + Record.FieldNames(handlers), + (h) => Diagnostics.WrapFunctionResult(Record.Field(handlers, h), (fn) => Diagnostics.LogFailure(h, fn))), + Record.FieldNames(handlers)), + + Diagnostics.ValueToText = (value) => + let + _canBeIdentifier = (x) => + let + keywords = {"and", "as", "each", "else", "error", 
"false", "if", "in", "is", "let", "meta", "not", "otherwise", "or", "section", "shared", "then", "true", "try", "type" }, + charAlpha = (c as number) => (c>= 65 and c <= 90) or (c>= 97 and c <= 122) or c=95, + charDigit = (c as number) => c>= 48 and c <= 57 + in + try + charAlpha(Character.ToNumber(Text.At(x,0))) + and + List.MatchesAll( + Text.ToList(x), + (c)=> let num = Character.ToNumber(c) in charAlpha(num) or charDigit(num) + ) + and not + List.MatchesAny( keywords, (li)=> li=x ) + otherwise + false, + + Serialize.Binary = (x) => "#binary(" & Serialize(Binary.ToList(x)) & ") ", + + Serialize.Date = (x) => "#date(" & + Text.From(Date.Year(x)) & ", " & + Text.From(Date.Month(x)) & ", " & + Text.From(Date.Day(x)) & ") ", + + Serialize.Datetime = (x) => "#datetime(" & + Text.From(Date.Year(DateTime.Date(x))) & ", " & + Text.From(Date.Month(DateTime.Date(x))) & ", " & + Text.From(Date.Day(DateTime.Date(x))) & ", " & + Text.From(Time.Hour(DateTime.Time(x))) & ", " & + Text.From(Time.Minute(DateTime.Time(x))) & ", " & + Text.From(Time.Second(DateTime.Time(x))) & ") ", + + Serialize.Datetimezone =(x) => let + dtz = DateTimeZone.ToRecord(x) + in + "#datetimezone(" & + Text.From(dtz[Year]) & ", " & + Text.From(dtz[Month]) & ", " & + Text.From(dtz[Day]) & ", " & + Text.From(dtz[Hour]) & ", " & + Text.From(dtz[Minute]) & ", " & + Text.From(dtz[Second]) & ", " & + Text.From(dtz[ZoneHours]) & ", " & + Text.From(dtz[ZoneMinutes]) & ") ", + + Serialize.Duration = (x) => let + dur = Duration.ToRecord(x) + in + "#duration(" & + Text.From(dur[Days]) & ", " & + Text.From(dur[Hours]) & ", " & + Text.From(dur[Minutes]) & ", " & + Text.From(dur[Seconds]) & ") ", + + Serialize.Function = (x) => _serialize_function_param_type( + Type.FunctionParameters(Value.Type(x)), + Type.FunctionRequiredParameters(Value.Type(x)) ) & + " as " & + _serialize_function_return_type(Value.Type(x)) & + " => (...) 
", + + Serialize.List = (x) => "{" & + List.Accumulate(x, "", (seed,item) => if seed="" then Serialize(item) else seed & ", " & Serialize(item)) & + "} ", + + Serialize.Logical = (x) => Text.From(x), + + Serialize.Null = (x) => "null", + + Serialize.Number = (x) => + let Text.From = (i as number) as text => + if Number.IsNaN(i) then "#nan" else + if i=Number.PositiveInfinity then "#infinity" else + if i=Number.NegativeInfinity then "-#infinity" else + Text.From(i) + in + Text.From(x), + + Serialize.Record = (x) => "[ " & + List.Accumulate( + Record.FieldNames(x), + "", + (seed,item) => + (if seed="" then Serialize.Identifier(item) else seed & ", " & Serialize.Identifier(item)) & " = " & Serialize(Record.Field(x, item)) + ) & + " ] ", + + Serialize.Table = (x) => "#table( type " & + _serialize_table_type(Value.Type(x)) & + ", " & + Serialize(Table.ToRows(x)) & + ") ", + + Serialize.Text = (x) => """" & + _serialize_text_content(x) & + """", + + _serialize_text_content = (x) => let + escapeText = (n as number) as text => "#(#)(" & Text.PadStart(Number.ToText(n, "X", "en-US"), 4, "0") & ")" + in + List.Accumulate( + List.Transform( + Text.ToList(x), + (c) => let n=Character.ToNumber(c) in + if n = 9 then "#(#)(tab)" else + if n = 10 then "#(#)(lf)" else + if n = 13 then "#(#)(cr)" else + if n = 34 then """""" else + if n = 35 then "#(#)(#)" else + if n < 32 then escapeText(n) else + if n < 127 then Character.FromNumber(n) else + escapeText(n) + ), + "", + (s,i)=>s&i + ), + + Serialize.Identifier = (x) => + if _canBeIdentifier(x) then + x + else + "#""" & + _serialize_text_content(x) & + """", + + Serialize.Time = (x) => "#time(" & + Text.From(Time.Hour(x)) & ", " & + Text.From(Time.Minute(x)) & ", " & + Text.From(Time.Second(x)) & ") ", + + Serialize.Type = (x) => "type " & _serialize_typename(x), + + + _serialize_typename = (x, optional funtype as logical) => /* Optional parameter: Is this being used as part of a function signature? 
*/ + let + isFunctionType = (x as type) => try if Type.FunctionReturn(x) is type then true else false otherwise false, + isTableType = (x as type) => try if Type.TableSchema(x) is table then true else false otherwise false, + isRecordType = (x as type) => try if Type.ClosedRecord(x) is type then true else false otherwise false, + isListType = (x as type) => try if Type.ListItem(x) is type then true else false otherwise false + in + + if funtype=null and isTableType(x) then _serialize_table_type(x) else + if funtype=null and isListType(x) then "{ " & @_serialize_typename( Type.ListItem(x) ) & " }" else + if funtype=null and isFunctionType(x) then "function " & _serialize_function_type(x) else + if funtype=null and isRecordType(x) then _serialize_record_type(x) else + + if x = type any then "any" else + let base = Type.NonNullable(x) in + (if Type.IsNullable(x) then "nullable " else "") & + (if base = type anynonnull then "anynonnull" else + if base = type binary then "binary" else + if base = type date then "date" else + if base = type datetime then "datetime" else + if base = type datetimezone then "datetimezone" else + if base = type duration then "duration" else + if base = type logical then "logical" else + if base = type none then "none" else + if base = type null then "null" else + if base = type number then "number" else + if base = type text then "text" else + if base = type time then "time" else + if base = type type then "type" else + + /* Abstract types: */ + if base = type function then "function" else + if base = type table then "table" else + if base = type record then "record" else + if base = type list then "list" else + + "any /*Actually unknown type*/"), + + _serialize_table_type = (x) => + let + schema = Type.TableSchema(x) + in + "table " & + (if Table.IsEmpty(schema) then "" else + "[" & List.Accumulate( + List.Transform( + Table.ToRecords(Table.Sort(schema,"Position")), + each Serialize.Identifier(_[Name]) & " = " & _[Kind]), + "", + 
(seed,item) => (if seed="" then item else seed & ", " & item ) + ) & "] " ), + + _serialize_record_type = (x) => + let flds = Type.RecordFields(x) + in + if Record.FieldCount(flds)=0 then "record" else + "[" & List.Accumulate( + Record.FieldNames(flds), + "", + (seed,item) => + seed & + (if seed<>"" then ", " else "") & + (Serialize.Identifier(item) & "=" & _serialize_typename(Record.Field(flds,item)[Type]) ) + ) & + (if Type.IsOpenRecord(x) then ",..." else "") & + "]", + + _serialize_function_type = (x) => _serialize_function_param_type( + Type.FunctionParameters(x), + Type.FunctionRequiredParameters(x) ) & + " as " & + _serialize_function_return_type(x), + + _serialize_function_param_type = (t,n) => + let + funsig = Table.ToRecords( + Table.TransformColumns( + Table.AddIndexColumn( Record.ToTable( t ), "isOptional", 1 ), + { "isOptional", (x)=> x>n } ) ) + in + "(" & + List.Accumulate( + funsig, + "", + (seed,item)=> + (if seed="" then "" else seed & ", ") & + (if item[isOptional] then "optional " else "") & + Serialize.Identifier(item[Name]) & " as " & _serialize_typename(item[Value], true) ) + & ")", + + _serialize_function_return_type = (x) => _serialize_typename(Type.FunctionReturn(x), true), + + Serialize = (x) as text => + if x is binary then try Serialize.Binary(x) otherwise "null /*serialize failed*/" else + if x is date then try Serialize.Date(x) otherwise "null /*serialize failed*/" else + if x is datetime then try Serialize.Datetime(x) otherwise "null /*serialize failed*/" else + if x is datetimezone then try Serialize.Datetimezone(x) otherwise "null /*serialize failed*/" else + if x is duration then try Serialize.Duration(x) otherwise "null /*serialize failed*/" else + if x is function then try Serialize.Function(x) otherwise "null /*serialize failed*/" else + if x is list then try Serialize.List(x) otherwise "null /*serialize failed*/" else + if x is logical then try Serialize.Logical(x) otherwise "null /*serialize failed*/" else + if x is null then 
try Serialize.Null(x) otherwise "null /*serialize failed*/" else + if x is number then try Serialize.Number(x) otherwise "null /*serialize failed*/" else + if x is record then try Serialize.Record(x) otherwise "null /*serialize failed*/" else + if x is table then try Serialize.Table(x) otherwise "null /*serialize failed*/" else + if x is text then try Serialize.Text(x) otherwise "null /*serialize failed*/" else + if x is time then try Serialize.Time(x) otherwise "null /*serialize failed*/" else + if x is type then try Serialize.Type(x) otherwise "null /*serialize failed*/" else + "[#_unable_to_serialize_#]" + in + try Serialize(value) otherwise "" +in + [ + LogValue = Diagnostics.LogValue, + LogValue2 = Diagnostics.LogValue2, + LogFailure = Diagnostics.LogFailure, + WrapFunctionResult = Diagnostics.WrapFunctionResult, + WrapHandlers = Diagnostics.WrapHandlers, + ValueToText = Diagnostics.ValueToText + ] \ No newline at end of file diff --git a/sql-odbc/src/PowerBIConnector/OdbcConstants.pqm b/sql-odbc/src/PowerBIConnector/OdbcConstants.pqm new file mode 100644 index 0000000000..144e525413 --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/OdbcConstants.pqm @@ -0,0 +1,1253 @@ +// values from https://github.com/Microsoft/ODBC-Specification/blob/master/Windows/inc/sqlext.h +[ + Flags = (flags as list) => + if (List.IsEmpty(flags)) then 0 else + let + Loop = List.Generate(()=> [i = 0, Combined = flags{0}], + each [i] < List.Count(flags), + each [Combined = Number.BitwiseOr([Combined], flags{i}), i = [i]+1], + each [Combined]), + Result = List.Last(Loop) + in + Result, + + SQL_HANDLE = + [ + ENV = 1, + DBC = 2, + STMT = 3, + DESC = 4 + ], + + RetCode = + [ + SUCCESS = 0, + SUCCESS_WITH_INFO = 1, + ERROR = -1, + INVALID_HANDLE = -2, + NO_DATA = 100 + ], + + SQL_CONVERT = + [ + BIGINT = 53, + BINARY = 54, + BIT = 55, + CHAR = 56, + DATE = 57, + DECIMAL = 58, + DOUBLE = 59, + FLOAT = 60, + INTEGER = 61, + LONGVARCHAR = 62, + NUMERIC = 63, + REAL = 64, + SMALLINT = 65, + 
TIME = 66, + TIMESTAMP = 67, + TINYINT = 68, + VARBINARY = 69, + VARCHAR = 70, + LONGVARBINARY = 71 + ], + + SQL_ROW = + [ + PROCEED = 0, + IGNORE = 1, + SUCCESS = 0, + DELETED = 1, + UPDATED = 2, + NOROW = 3, + ADDED = 4, + ERROR = 5, + SUCCESS_WITH_INFO = 6 + ], + +SQL_CVT = +[ + //None = 0, + + CHAR = 0x00000001, + NUMERIC = 0x00000002, + DECIMAL = 0x00000004, + INTEGER = 0x00000008, + SMALLINT = 0x00000010, + FLOAT = 0x00000020, + REAL = 0x00000040, + DOUBLE = 0x00000080, + VARCHAR = 0x00000100, + LONGVARCHAR = 0x00000200, + BINARY = 0x00000400, + VARBINARY = 0x00000800, + BIT = 0x00001000, + TINYINT = 0x00002000, + BIGINT = 0x00004000, + DATE = 0x00008000, + TIME = 0x00010000, + TIMESTAMP = 0x00020000, + LONGVARBINARY = 0x00040000, + INTERVAL_YEAR_MONTH = 0x00080000, + INTERVAL_DAY_TIME = 0x00100000, + WCHAR = 0x00200000, + WLONGVARCHAR = 0x00400000, + WVARCHAR = 0x00800000, + GUID = 0x01000000 +], + + STMT = + [ + CLOSE = 0, + DROP = 1, + UNBIND = 2, + RESET_PARAMS = 3 + ], + + SQL_MAX = + [ + NUMERIC_LEN = 16 + ], + + SQL_IS = + [ + POINTER = -4, + INTEGER = -6, + UINTEGER = -5, + SMALLINT = -8 + ], + + //SQL Server specific defines + // + SQL_HC = // from Odbcss.h + [ + OFF = 0, // FOR BROWSE columns are hidden + ON = 1 // FOR BROWSE columns are exposed + ], + + SQL_NB = // from Odbcss.h + [ + OFF = 0, // NO_BROWSETABLE is off + ON = 1 // NO_BROWSETABLE is on + ], + + // SQLColAttributes driver specific defines. + // SQLSet/GetDescField driver specific defines. + // Microsoft has 1200 thru 1249 reserved for Microsoft SQL Server driver usage. 
+ // + SQL_CA_SS = // from Odbcss.h + [ + BASE = 1200, // SQL_CA_SS_BASE + + COLUMN_HIDDEN = 1200 + 11, // Column is hidden (FOR BROWSE) + COLUMN_KEY = 1200 + 12, // Column is key column (FOR BROWSE) + VARIANT_TYPE = 1200 + 15, + VARIANT_SQL_TYPE = 1200 + 16, + VARIANT_SERVER_TYPE = 1200 + 17 + + ], + + SQL_SOPT_SS = // from Odbcss.h + [ + BASE = 1225, // SQL_SOPT_SS_BASE + HIDDEN_COLUMNS = 1225 + 2, // Expose FOR BROWSE hidden columns + NOBROWSETABLE = 1225 + 3 // Set NOBROWSETABLE option + ], + + SQL_COMMIT = 0, //Commit + SQL_ROLLBACK = 1, //Abort + + //static public readonly IntPtr SQL_AUTOCOMMIT_OFF = IntPtr.Zero; + //static public readonly IntPtr SQL_AUTOCOMMIT_ON = new IntPtr(1); + + SQL_TRANSACTION = + [ + READ_UNCOMMITTED = 0x00000001, + READ_COMMITTED = 0x00000002, + REPEATABLE_READ = 0x00000004, + SERIALIZABLE = 0x00000008, + SNAPSHOT = 0x00000020 // VSDD 414121: SQL_TXN_SS_SNAPSHOT == 0x20 (sqlncli.h) + ], + + SQL_PARAM = + [ + TYPE_UNKNOWN = 0, // SQL_PARAM_TYPE_UNKNOWN + INPUT = 1, // SQL_PARAM_INPUT + INPUT_OUTPUT = 2, // SQL_PARAM_INPUT_OUTPUT + RESULT_COL = 3, // SQL_RESULT_COL + OUTPUT = 4, // SQL_PARAM_OUTPUT + RETURN_VALUE = 5 // SQL_RETURN_VALUE + ], + + SQL_DESC = + [ + // from sql.h (ODBCVER >= 3.0) + // + COUNT = 1001, + TYPE = 1002, + LENGTH = 1003, + OCTET_LENGTH_PTR = 1004, + PRECISION = 1005, + SCALE = 1006, + DATETIME_INTERVAL_CODE = 1007, + NULLABLE = 1008, + INDICATOR_PTR = 1009, + DATA_PTR = 1010, + NAME = 1011, + UNNAMED = 1012, + OCTET_LENGTH = 1013, + ALLOC_TYPE = 1099, + + // from sqlext.h (ODBCVER >= 3.0) + // + CONCISE_TYPE = SQL_COLUMN[TYPE], + DISPLAY_SIZE = SQL_COLUMN[DISPLAY_SIZE], + UNSIGNED = SQL_COLUMN[UNSIGNED], + UPDATABLE = SQL_COLUMN[UPDATABLE], + AUTO_UNIQUE_VALUE = SQL_COLUMN[AUTO_INCREMENT], + + TYPE_NAME = SQL_COLUMN[TYPE_NAME], + TABLE_NAME = SQL_COLUMN[TABLE_NAME], + SCHEMA_NAME = SQL_COLUMN[OWNER_NAME], + CATALOG_NAME = SQL_COLUMN[QUALIFIER_NAME], + + BASE_COLUMN_NAME = 22, + BASE_TABLE_NAME = 23, + + 
NUM_PREC_RADIX = 32 + ], + + // ODBC version 2.0 style attributes + // All IdentifierValues are ODBC 1.0 unless marked differently + // + SQL_COLUMN = + [ + COUNT = 0, + NAME = 1, + TYPE = 2, + LENGTH = 3, + PRECISION = 4, + SCALE = 5, + DISPLAY_SIZE = 6, + NULLABLE = 7, + UNSIGNED = 8, + MONEY = 9, + UPDATABLE = 10, + AUTO_INCREMENT = 11, + CASE_SENSITIVE = 12, + SEARCHABLE = 13, + TYPE_NAME = 14, + TABLE_NAME = 15, // (ODBC 2.0) + OWNER_NAME = 16, // (ODBC 2.0) + QUALIFIER_NAME = 17, // (ODBC 2.0) + LABEL = 18 + ], + + // values from sqlext.h + SQL_SQL92_RELATIONAL_JOIN_OPERATORS = + [ + CORRESPONDING_CLAUSE = 0x00000001, // SQL_SRJO_CORRESPONDING_CLAUSE + CROSS_JOIN = 0x00000002, // SQL_SRJO_CROSS_JOIN + EXCEPT_JOIN = 0x00000004, // SQL_SRJO_EXCEPT_JOIN + FULL_OUTER_JOIN = 0x00000008, // SQL_SRJO_FULL_OUTER_JOIN + INNER_JOIN = 0x00000010, // SQL_SRJO_INNER_JOIN + INTERSECT_JOIN = 0x00000020, // SQL_SRJO_INTERSECT_JOIN + LEFT_OUTER_JOIN = 0x00000040, // SQL_SRJO_LEFT_OUTER_JOIN + NATURAL_JOIN = 0x00000080, // SQL_SRJO_NATURAL_JOIN + RIGHT_OUTER_JOIN = 0x00000100, // SQL_SRJO_RIGHT_OUTER_JOIN + UNION_JOIN = 0x00000200 // SQL_SRJO_UNION_JOIN + ], + + // values from sqlext.h + SQL_QU = + [ + SQL_QU_DML_STATEMENTS = 0x00000001, + SQL_QU_PROCEDURE_INVOCATION = 0x00000002, + SQL_QU_TABLE_DEFINITION = 0x00000004, + SQL_QU_INDEX_DEFINITION = 0x00000008, + SQL_QU_PRIVILEGE_DEFINITION = 0x00000010 + ], + + // values from sql.h + SQL_OJ_CAPABILITIES = + [ + LEFT = 0x00000001, // SQL_OJ_LEFT + RIGHT = 0x00000002, // SQL_OJ_RIGHT + FULL = 0x00000004, // SQL_OJ_FULL + NESTED = 0x00000008, // SQL_OJ_NESTED + NOT_ORDERED = 0x00000010, // SQL_OJ_NOT_ORDERED + INNER = 0x00000020, // SQL_OJ_INNER + ALL_COMPARISON_OPS = 0x00000040 //SQL_OJ_ALLCOMPARISION+OPS + ], + + SQL_UPDATABLE = + [ + READONLY = 0, // SQL_ATTR_READ_ONLY + WRITE = 1, // SQL_ATTR_WRITE + READWRITE_UNKNOWN = 2 // SQL_ATTR_READWRITE_UNKNOWN + ], + + SQL_IDENTIFIER_CASE = + [ + UPPER = 1, // SQL_IC_UPPER + LOWER = 2, 
// SQL_IC_LOWER + SENSITIVE = 3, // SQL_IC_SENSITIVE + MIXED = 4 // SQL_IC_MIXED + ], + + // Uniqueness parameter in the SQLStatistics function + SQL_INDEX = + [ + UNIQUE = 0, + ALL = 1 + ], + + // Reserved parameter in the SQLStatistics function + SQL_STATISTICS_RESERVED = + [ + QUICK = 0, // SQL_QUICK + ENSURE = 1 // SQL_ENSURE + ], + + // Identifier type parameter in the SQLSpecialColumns function + SQL_SPECIALCOLS = + [ + BEST_ROWID = 1, // SQL_BEST_ROWID + ROWVER = 2 // SQL_ROWVER + ], + + // Scope parameter in the SQLSpecialColumns function + SQL_SCOPE = + [ + CURROW = 0, // SQL_SCOPE_CURROW + TRANSACTION = 1, // SQL_SCOPE_TRANSACTION + SESSION = 2 // SQL_SCOPE_SESSION + ], + + SQL_NULLABILITY = + [ + NO_NULLS = 0, // SQL_NO_NULLS + NULLABLE = 1, // SQL_NULLABLE + UNKNOWN = 2 // SQL_NULLABLE_UNKNOWN + ], + + SQL_SEARCHABLE = + [ + UNSEARCHABLE = 0, // SQL_UNSEARCHABLE + LIKE_ONLY = 1, // SQL_LIKE_ONLY + ALL_EXCEPT_LIKE = 2, // SQL_ALL_EXCEPT_LIKE + SEARCHABLE = 3 // SQL_SEARCHABLE + ], + + SQL_UNNAMED = + [ + NAMED = 0, // SQL_NAMED + UNNAMED = 1 // SQL_UNNAMED + ], + // todo:move + // internal constants + // not odbc specific + // + HANDLER = + [ + IGNORE = 0x00000000, + THROW = 0x00000001 + ], + + // values for SQLStatistics TYPE column + SQL_STATISTICSTYPE = + [ + TABLE_STAT = 0, // TABLE Statistics + INDEX_CLUSTERED = 1, // CLUSTERED index statistics + INDEX_HASHED = 2, // HASHED index statistics + INDEX_OTHER = 3 // OTHER index statistics + ], + + // values for SQLProcedures PROCEDURE_TYPE column + SQL_PROCEDURETYPE = + [ + UNKNOWN = 0, // procedure is of unknow type + PROCEDURE = 1, // procedure is a procedure + FUNCTION = 2 // procedure is a function + ], + + // private constants + // to define data types (see below) + // + SIGNED_OFFSET = -20, // SQL_SIGNED_OFFSET + UNSIGNED_OFFSET = -22, // SQL_UNSIGNED_OFFSET + + // C Data Types + SQL_C = + [ + CHAR = 1, + WCHAR = -8, + SLONG = 4 + SIGNED_OFFSET, + ULONG = 4 + UNSIGNED_OFFSET, + SSHORT = 5 + 
SIGNED_OFFSET, + USHORT = 5 + UNSIGNED_OFFSET, + FLOAT = 7, + DOUBLE = 8, + BIT = -7, + STINYINT = -6 + SIGNED_OFFSET, + UTINYINT = -6 + UNSIGNED_OFFSET, + SBIGINT = -5 + SIGNED_OFFSET, + UBIGINT = -5 + UNSIGNED_OFFSET, + BINARY = -2, + TIMESTAMP = 11, + + TYPE_DATE = 91, + TYPE_TIME = 92, + TYPE_TIMESTAMP = 93, + + NUMERIC = 2, + GUID = -11, + DEFAULT = 99, + ARD_TYPE = -99 + ], + + // SQL Data Types + SQL_TYPE = + [ + // Base data types (sql.h) + UNKNOWN = 0, + NULL = 0, + CHAR = 1, + NUMERIC = 2, + DECIMAL = 3, + INTEGER = 4, + SMALLINT = 5, + FLOAT = 6, + REAL = 7, + DOUBLE = 8, + DATETIME = 9, // V3 Only + VARCHAR = 12, + + // Unicode types (sqlucode.h) + WCHAR = -8, + WVARCHAR = -9, + WLONGVARCHAR = -10, + + // Extended data types (sqlext.h) + INTERVAL = 10, // V3 Only + TIME = 10, + TIMESTAMP = 11, + LONGVARCHAR = -1, + BINARY = -2, + VARBINARY = -3, + LONGVARBINARY = -4, + BIGINT = -5, + TINYINT = -6, + BIT = -7, + GUID = -11, // V3 Only + + // One-parameter shortcuts for date/time data types. 
+ TYPE_DATE = 91, + TYPE_TIME = 92, + TYPE_TIMESTAMP = 93, + + // SQL Server Types -150 to -159 (sqlncli.h) + SS_VARIANT = -150, + SS_UDT = -151, + SS_XML = -152, + SS_TABLE = -153, + SS_TIME2 = -154, + SS_TIMESTAMPOFFSET = -155 + ], + + //SQL_ALL_TYPES = 0, + //static public readonly IntPtr SQL_HANDLE_NULL = IntPtr.Zero; + + SQL_LENGTH = + [ + SQL_IGNORE = -6, + SQL_DEFAULT_PARAM = -5, + SQL_NO_TOTAL = -4, + SQL_NTS = -3, + SQL_DATA_AT_EXEC = -2, + SQL_NULL_DATA = -1 + ], + + SQL_DEFAULT_PARAM = -5, + + // column ordinals for SQLProcedureColumns result set + // this column ordinals are not defined in any c/c++ header but in the ODBC Programmer's Reference under SQLProcedureColumns + // + COLUMN_NAME = 4, + COLUMN_TYPE = 5, + DATA_TYPE = 6, + COLUMN_SIZE = 8, + DECIMAL_DIGITS = 10, + NUM_PREC_RADIX = 11, + + SQL_ATTR = + [ + ODBC_VERSION = 200, + CONNECTION_POOLING = 201, + AUTOCOMMIT = 102, + TXN_ISOLATION = 108, + CURRENT_CATALOG = 109, + LOGIN_TIMEOUT = 103, + QUERY_TIMEOUT = 0, + CONNECTION_DEAD = 1209, + + SQL_COPT_SS_BASE = 1200, + SQL_COPT_SS_ENLIST_IN_DTC = (1200 + 7), + SQL_COPT_SS_TXN_ISOLATION = (1200 + 27), + + MAX_LENGTH = 3, + ROW_BIND_TYPE = 5, + CURSOR_TYPE = 6, + RETRIEVE_DATA = 11, + ROW_STATUS_PTR = 25, + ROWS_FETCHED_PTR = 26, + ROW_ARRAY_SIZE = 27, + + // ODBC 3.0 + APP_ROW_DESC = 10010, + APP_PARAM_DESC = 10011, + IMP_ROW_DESC = 10012, + IMP_PARAM_DESC = 10013, + METADATA_ID = 10014, + + // ODBC 4.0 + PRIVATE_DRIVER_LOCATION = 204 + ], + + SQL_RD = + [ + OFF = 0, + ON = 1 + ], + + SQL_GD = + [ + //None = 0, + ANY_COLUMN = 1, + ANY_ORDER = 2, + BLOCK = 4, + BOUND = 8, + OUTPUT_PARAMS = 16 + ], + + //SQLGetInfo +/* + SQL_INFO = + [ + SQL_ACTIVE_CONNECTIONS = 0, + SQL_MAX_DRIVER_CONNECTIONS = 0, + SQL_MAX_CONCURRENT_ACTIVITIES = 1, + SQL_ACTIVE_STATEMENTS = 1, + SQL_DATA_SOURCE_NAME = 2, + SQL_DRIVER_HDBC, + SQL_DRIVER_HENV, + SQL_DRIVER_HSTMT, + SQL_DRIVER_NAME, + SQL_DRIVER_VER, + SQL_FETCH_DIRECTION, + SQL_ODBC_API_CONFORMANCE, + SQL_ODBC_VER, 
+ SQL_ROW_UPDATES, + SQL_ODBC_SAG_CLI_CONFORMANCE, + SQL_SERVER_NAME, + SQL_SEARCH_PATTERN_ESCAPE, + SQL_ODBC_SQL_CONFORMANCE, + + SQL_DATABASE_NAME, + SQL_DBMS_NAME, + SQL_DBMS_VER, + + SQL_ACCESSIBLE_TABLES, + SQL_ACCESSIBLE_PROCEDURES, + SQL_PROCEDURES, + SQL_CONCAT_NULL_BEHAVIOR, + SQL_CURSOR_COMMIT_BEHAVIOR, + SQL_CURSOR_ROLLBACK_BEHAVIOR, + SQL_DATA_SOURCE_READ_ONLY, + SQL_DEFAULT_TXN_ISOLATION, + SQL_EXPRESSIONS_IN_ORDERBY, + SQL_IDENTIFIER_CASE, + SQL_IDENTIFIER_QUOTE_CHAR, + SQL_MAX_COLUMN_NAME_LEN, + SQL_MAX_CURSOR_NAME_LEN, + SQL_MAX_OWNER_NAME_LEN, + SQL_MAX_SCHEMA_NAME_LEN = 32, + SQL_MAX_PROCEDURE_NAME_LEN, + SQL_MAX_QUALIFIER_NAME_LEN, + SQL_MAX_CATALOG_NAME_LEN = 34, + SQL_MAX_TABLE_NAME_LEN, + SQL_MULT_RESULT_SETS, + SQL_MULTIPLE_ACTIVE_TXN, + SQL_OUTER_JOINS, + SQL_SCHEMA_TERM, + SQL_PROCEDURE_TERM, + SQL_CATALOG_NAME_SEPARATOR, + SQL_CATALOG_TERM, + SQL_SCROLL_CONCURRENCY, + SQL_SCROLL_OPTIONS, + SQL_TABLE_TERM, + SQL_TXN_CAPABLE, + SQL_USER_NAME, + + SQL_CONVERT_FUNCTIONS, + SQL_NUMERIC_FUNCTIONS, + SQL_STRING_FUNCTIONS, + SQL_SYSTEM_FUNCTIONS, + SQL_TIMEDATE_FUNCTIONS, + + SQL_CONVERT_BIGINT, + SQL_CONVERT_BINARY, + SQL_CONVERT_BIT, + SQL_CONVERT_CHAR, + SQL_CONVERT_DATE, + SQL_CONVERT_DECIMAL, + SQL_CONVERT_DOUBLE, + SQL_CONVERT_FLOAT, + SQL_CONVERT_INTEGER, + SQL_CONVERT_LONGVARCHAR, + SQL_CONVERT_NUMERIC, + SQL_CONVERT_REAL, + SQL_CONVERT_SMALLINT, + SQL_CONVERT_TIME, + SQL_CONVERT_TIMESTAMP, + SQL_CONVERT_TINYINT, + SQL_CONVERT_VARBINARY, + SQL_CONVERT_VARCHAR, + SQL_CONVERT_LONGVARBINARY, + + SQL_TXN_ISOLATION_OPTION, + SQL_ODBC_SQL_OPT_IEF, + SQL_INTEGRITY = 73, + SQL_CORRELATION_NAME, + SQL_NON_NULLABLE_COLUMNS, + SQL_DRIVER_HLIB, + SQL_DRIVER_ODBC_VER, + SQL_LOCK_TYPES, + SQL_POS_OPERATIONS, + SQL_POSITIONED_STATEMENTS, + SQL_GETDATA_EXTENSIONS, + SQL_BOOKMARK_PERSISTENCE, + SQL_STATIC_SENSITIVITY, + SQL_FILE_USAGE, + SQL_NULL_COLLATION, + SQL_ALTER_TABLE, + SQL_COLUMN_ALIAS, + SQL_GROUP_BY, + SQL_KEYWORDS, + 
SQL_ORDER_BY_COLUMNS_IN_SELECT, + SQL_SCHEMA_USAGE, + SQL_CATALOG_USAGE, + SQL_QUOTED_IDENTIFIER_CASE, + SQL_SPECIAL_CHARACTERS, + SQL_SUBQUERIES, + SQL_UNION_STATEMENT, + SQL_MAX_COLUMNS_IN_GROUP_BY, + SQL_MAX_COLUMNS_IN_INDEX, + SQL_MAX_COLUMNS_IN_ORDER_BY, + SQL_MAX_COLUMNS_IN_SELECT, + SQL_MAX_COLUMNS_IN_TABLE, + SQL_MAX_INDEX_SIZE, + SQL_MAX_ROW_SIZE_INCLUDES_LONG, + SQL_MAX_ROW_SIZE, + SQL_MAX_STATEMENT_LEN, + SQL_MAX_TABLES_IN_SELECT, + SQL_MAX_USER_NAME_LEN, + SQL_MAX_CHAR_LITERAL_LEN, + SQL_TIMEDATE_ADD_INTERVALS, + SQL_TIMEDATE_DIFF_INTERVALS, + SQL_NEED_LONG_DATA_LEN, + SQL_MAX_BINARY_LITERAL_LEN, + SQL_LIKE_ESCAPE_CLAUSE, + SQL_CATALOG_LOCATION, + SQL_OJ_CAPABILITIES, + + SQL_ACTIVE_ENVIRONMENTS, + SQL_ALTER_DOMAIN, + SQL_SQL_CONFORMANCE, + SQL_DATETIME_LITERALS, + SQL_BATCH_ROW_COUNT, + SQL_BATCH_SUPPORT, + SQL_CONVERT_WCHAR, + SQL_CONVERT_INTERVAL_DAY_TIME, + SQL_CONVERT_INTERVAL_YEAR_MONTH, + SQL_CONVERT_WLONGVARCHAR, + SQL_CONVERT_WVARCHAR, + SQL_CREATE_ASSERTION, + SQL_CREATE_CHARACTER_SET, + SQL_CREATE_COLLATION, + SQL_CREATE_DOMAIN, + SQL_CREATE_SCHEMA, + SQL_CREATE_TABLE, + SQL_CREATE_TRANSLATION, + SQL_CREATE_VIEW, + SQL_DRIVER_HDESC, + SQL_DROP_ASSERTION, + SQL_DROP_CHARACTER_SET, + SQL_DROP_COLLATION, + SQL_DROP_DOMAIN, + SQL_DROP_SCHEMA, + SQL_DROP_TABLE, + SQL_DROP_TRANSLATION, + SQL_DROP_VIEW, + SQL_DYNAMIC_CURSOR_ATTRIBUTES1, + SQL_DYNAMIC_CURSOR_ATTRIBUTES2, + SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES1, + SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES2, + SQL_INDEX_KEYWORDS, + SQL_INFO_SCHEMA_VIEWS, + SQL_KEYSET_CURSOR_ATTRIBUTES1, + SQL_KEYSET_CURSOR_ATTRIBUTES2, + SQL_ODBC_INTERFACE_CONFORMANCE, + SQL_PARAM_ARRAY_ROW_COUNTS, + SQL_PARAM_ARRAY_SELECTS, + SQL_SQL92_DATETIME_FUNCTIONS, + SQL_SQL92_FOREIGN_KEY_DELETE_RULE, + SQL_SQL92_FOREIGN_KEY_UPDATE_RULE, + SQL_SQL92_GRANT, + SQL_SQL92_NUMERIC_VALUE_FUNCTIONS, + SQL_SQL92_PREDICATES, + SQL_SQL92_RELATIONAL_JOIN_OPERATORS, + SQL_SQL92_REVOKE, + SQL_SQL92_ROW_VALUE_CONSTRUCTOR, + 
SQL_SQL92_STRING_FUNCTIONS, + SQL_SQL92_VALUE_EXPRESSIONS, + SQL_STANDARD_CLI_CONFORMANCE, + SQL_STATIC_CURSOR_ATTRIBUTES1, + SQL_STATIC_CURSOR_ATTRIBUTES2, + SQL_AGGREGATE_FUNCTIONS, + SQL_DDL_INDEX, + SQL_DM_VER, + SQL_INSERT_STATEMENT, + SQL_CONVERT_GUID, + + SQL_XOPEN_CLI_YEAR = 10000, + SQL_CURSOR_SENSITIVITY, + SQL_DESCRIBE_PARAMETER, + SQL_CATALOG_NAME, + SQL_COLLATION_SEQ, + SQL_MAX_IDENTIFIER_LEN, + SQL_ASYNC_MODE = 10021, + SQL_MAX_ASYNC_CONCURRENT_STATEMENTS, + + SQL_DTC_TRANSITION_COST = 1750, + ], +*/ + SQL_OAC = + [ + SQL_OAC_None = 0x0000, + SQL_OAC_LEVEL1 = 0x0001, + SQL_OAC_LEVEL2 = 0x0002 + ], + + SQL_OSC = + [ + SQL_OSC_MINIMUM = 0x0000, + SQL_OSC_CORE = 0x0001, + SQL_OSC_EXTENDED = 0x0002 + ], + + SQL_SCC = + [ + SQL_SCC_XOPEN_CLI_VERSION1 = 0x00000001, + SQL_SCC_ISO92_CLI = 0x00000002 + ], + + SQL_SVE = + [ + SQL_SVE_CASE = 0x00000001, + SQL_SVE_CAST = 0x00000002, + SQL_SVE_COALESCE = 0x00000004, + SQL_SVE_NULLIF = 0x00000008 + ], + + SQL_SSF = + [ + SQL_SSF_CONVERT = 0x00000001, + SQL_SSF_LOWER = 0x00000002, + SQL_SSF_UPPER = 0x00000004, + SQL_SSF_SUBSTRING = 0x00000008, + SQL_SSF_TRANSLATE = 0x00000010, + SQL_SSF_TRIM_BOTH = 0x00000020, + SQL_SSF_TRIM_LEADING = 0x00000040, + SQL_SSF_TRIM_TRAILING = 0x00000080 + ], + + SQL_SP = + [ + //None = 0, + + SQL_SP_EXISTS = 0x00000001, + SQL_SP_ISNOTNULL = 0x00000002, + SQL_SP_ISNULL = 0x00000004, + SQL_SP_MATCH_FULL = 0x00000008, + SQL_SP_MATCH_PARTIAL = 0x00000010, + SQL_SP_MATCH_UNIQUE_FULL = 0x00000020, + SQL_SP_MATCH_UNIQUE_PARTIAL = 0x00000040, + SQL_SP_OVERLAPS = 0x00000080, + SQL_SP_UNIQUE = 0x00000100, + SQL_SP_LIKE = 0x00000200, + SQL_SP_IN = 0x00000400, + SQL_SP_BETWEEN = 0x00000800, + SQL_SP_COMPARISON = 0x00001000, + SQL_SP_QUANTIFIED_COMPARISON = 0x00002000, + + All = 0x0000FFFF + ], + + SQL_OIC = + [ + SQL_OIC_CORE = 1, + SQL_OIC_LEVEL1 = 2, + SQL_OIC_LEVEL2 = 3 + ], + + SQL_USAGE = + [ + SQL_U_DML_STATEMENTS = 0x00000001, + SQL_U_PROCEDURE_INVOCATION = 0x00000002, + 
SQL_U_TABLE_DEFINITION = 0x00000004, + SQL_U_INDEX_DEFINITION = 0x00000008, + SQL_U_PRIVILEGE_DEFINITION = 0x00000010 + ], + + SQL_GB = + [ + + SQL_GB_NOT_SUPPORTED = 0, + SQL_GB_GROUP_BY_EQUALS_SELECT = 1, + SQL_GB_GROUP_BY_CONTAINS_SELECT = 2, + SQL_GB_NO_RELATION = 3, + SQL_GB_COLLATE = 4 + ], + + SQL_NC = + [ + SQL_NC_END = 0, + SQL_NC_HIGH = 1, + SQL_NC_LOW = 2, + SQL_NC_START = 3 + ], + + SQL_CN = + [ + SQL_CN_None = 0, + SQL_CN_DIFFERENT = 1, + SQL_CN_ANY = 2 + ], + + SQL_NNC = + [ + SQL_NNC_NULL = 0, + SQL_NNC_NON_NULL = 1 + ], + + SQL_CB = + [ + SQL_CB_NULL = 0, + SQL_CB_NON_NULL = 1 + ], + + SQL_FD_FETCH = + [ + SQL_FD_FETCH_NEXT = 0x00000001, + SQL_FD_FETCH_FIRST = 0x00000002, + SQL_FD_FETCH_LAST = 0x00000004, + SQL_FD_FETCH_PRIOR = 0x00000008, + SQL_FD_FETCH_ABSOLUTE = 0x00000010, + SQL_FD_FETCH_RELATIVE = 0x00000020, + SQL_FD_FETCH_BOOKMARK = 0x00000080 + ], + + SQL_SQ = + [ + SQL_SQ_COMPARISON = 0x00000001, + SQL_SQ_EXISTS = 0x00000002, + SQL_SQ_IN = 0x00000004, + SQL_SQ_QUANTIFIED = 0x00000008, + SQL_SQ_CORRELATED_SUBQUERIES = 0x00000010 + ], + + SQL_U = + [ + SQL_U_UNION = 0x00000001, + SQL_U_UNION_ALL = 0x00000002 + ], + + SQL_BP = + [ + SQL_BP_CLOSE = 0x00000001, + SQL_BP_DELETE = 0x00000002, + SQL_BP_DROP = 0x00000004, + SQL_BP_TRANSACTION = 0x00000008, + SQL_BP_UPDATE = 0x00000010, + SQL_BP_OTHER_HSTMT = 0x00000020, + SQL_BP_SCROLL = 0x00000040 + ], + + SQL_QL = + [ + SQL_QL_START = 0x0001, + SQL_QL_END = 0x0002 + ], + + SQL_OJ = + [ + SQL_OJ_LEFT = 0x00000001, + SQL_OJ_RIGHT = 0x00000002, + SQL_OJ_FULL = 0x00000004, + SQL_OJ_NESTED = 0x00000008, + SQL_OJ_NOT_ORDERED = 0x00000010, + SQL_OJ_INNER = 0x00000020, + SQL_OJ_ALL_COMPARISON_OPS = 0x00000040 + ], + + SQL_FN_CVT = + [ + //None = 0, + + SQL_FN_CVT_CONVERT = 0x00000001, + SQL_FN_CVT_CAST = 0x00000002 + ], + + SQL_FN_NUM = + [ + //None = 0, + + SQL_FN_NUM_ABS = 0x00000001, + SQL_FN_NUM_ACOS = 0x00000002, + SQL_FN_NUM_ASIN = 0x00000004, + SQL_FN_NUM_ATAN = 0x00000008, + SQL_FN_NUM_ATAN2 = 
0x00000010, + SQL_FN_NUM_CEILING = 0x00000020, + SQL_FN_NUM_COS = 0x00000040, + SQL_FN_NUM_COT = 0x00000080, + SQL_FN_NUM_EXP = 0x00000100, + SQL_FN_NUM_FLOOR = 0x00000200, + SQL_FN_NUM_LOG = 0x00000400, + SQL_FN_NUM_MOD = 0x00000800, + SQL_FN_NUM_SIGN = 0x00001000, + SQL_FN_NUM_SIN = 0x00002000, + SQL_FN_NUM_SQRT = 0x00004000, + SQL_FN_NUM_TAN = 0x00008000, + SQL_FN_NUM_PI = 0x00010000, + SQL_FN_NUM_RAND = 0x00020000, + SQL_FN_NUM_DEGREES = 0x00040000, + SQL_FN_NUM_LOG10 = 0x00080000, + SQL_FN_NUM_POWER = 0x00100000, + SQL_FN_NUM_RADIANS = 0x00200000, + SQL_FN_NUM_ROUND = 0x00400000, + SQL_FN_NUM_TRUNCATE = 0x00800000 + ], + + SQL_SNVF = + [ + SQL_SNVF_BIT_LENGTH = 0x00000001, + SQL_SNVF_CHAR_LENGTH = 0x00000002, + SQL_SNVF_CHARACTER_LENGTH = 0x00000004, + SQL_SNVF_EXTRACT = 0x00000008, + SQL_SNVF_OCTET_LENGTH = 0x00000010, + SQL_SNVF_POSITION = 0x00000020 + ], + + SQL_FN_STR = + [ + //None = 0, + + SQL_FN_STR_CONCAT = 0x00000001, + SQL_FN_STR_INSERT = 0x00000002, + SQL_FN_STR_LEFT = 0x00000004, + SQL_FN_STR_LTRIM = 0x00000008, + SQL_FN_STR_LENGTH = 0x00000010, + SQL_FN_STR_LOCATE = 0x00000020, + SQL_FN_STR_LCASE = 0x00000040, + SQL_FN_STR_REPEAT = 0x00000080, + SQL_FN_STR_REPLACE = 0x00000100, + SQL_FN_STR_RIGHT = 0x00000200, + SQL_FN_STR_RTRIM = 0x00000400, + SQL_FN_STR_SUBSTRING = 0x00000800, + SQL_FN_STR_UCASE = 0x00001000, + SQL_FN_STR_ASCII = 0x00002000, + SQL_FN_STR_CHAR = 0x00004000, + SQL_FN_STR_DIFFERENCE = 0x00008000, + SQL_FN_STR_LOCATE_2 = 0x00010000, + SQL_FN_STR_SOUNDEX = 0x00020000, + SQL_FN_STR_SPACE = 0x00040000, + SQL_FN_STR_BIT_LENGTH = 0x00080000, + SQL_FN_STR_CHAR_LENGTH = 0x00100000, + SQL_FN_STR_CHARACTER_LENGTH = 0x00200000, + SQL_FN_STR_OCTET_LENGTH = 0x00400000, + SQL_FN_STR_POSITION = 0x00800000 + ], + + SQL_FN_SYSTEM = + [ + //None = 0, + + SQL_FN_SYS_USERNAME = 0x00000001, + SQL_FN_SYS_DBNAME = 0x00000002, + SQL_FN_SYS_IFNULL = 0x00000004 + ], + + SQL_FN_TD = + [ + //None = 0, + + SQL_FN_TD_NOW = 0x00000001, + SQL_FN_TD_CURDATE = 
0x00000002, + SQL_FN_TD_DAYOFMONTH = 0x00000004, + SQL_FN_TD_DAYOFWEEK = 0x00000008, + SQL_FN_TD_DAYOFYEAR = 0x00000010, + SQL_FN_TD_MONTH = 0x00000020, + SQL_FN_TD_QUARTER = 0x00000040, + SQL_FN_TD_WEEK = 0x00000080, + SQL_FN_TD_YEAR = 0x00000100, + SQL_FN_TD_CURTIME = 0x00000200, + SQL_FN_TD_HOUR = 0x00000400, + SQL_FN_TD_MINUTE = 0x00000800, + SQL_FN_TD_SECOND = 0x00001000, + SQL_FN_TD_TIMESTAMPADD = 0x00002000, + SQL_FN_TD_TIMESTAMPDIFF = 0x00004000, + SQL_FN_TD_DAYNAME = 0x00008000, + SQL_FN_TD_MONTHNAME = 0x00010000, + SQL_FN_TD_CURRENT_DATE = 0x00020000, + SQL_FN_TD_CURRENT_TIME = 0x00040000, + SQL_FN_TD_CURRENT_TIMESTAMP = 0x00080000, + SQL_FN_TD_EXTRACT = 0x00100000 + ], + + SQL_SDF = + [ + SQL_SDF_CURRENT_DATE = 0x00000001, + SQL_SDF_CURRENT_TIME = 0x00000002, + SQL_SDF_CURRENT_TIMESTAMP = 0x00000004 + ], + + SQL_TSI = + [ + //None = 0, + + SQL_TSI_FRAC_SECOND = 0x00000001, + SQL_TSI_SECOND = 0x00000002, + SQL_TSI_MINUTE = 0x00000004, + SQL_TSI_HOUR = 0x00000008, + SQL_TSI_DAY = 0x00000010, + SQL_TSI_WEEK = 0x00000020, + SQL_TSI_MONTH = 0x00000040, + SQL_TSI_QUARTER = 0x00000080, + SQL_TSI_YEAR = 0x00000100 + ], + + SQL_AF = + [ + //None = 0, + + SQL_AF_AVG = 0x00000001, + SQL_AF_COUNT = 0x00000002, + SQL_AF_MAX = 0x00000004, + SQL_AF_MIN = 0x00000008, + SQL_AF_SUM = 0x00000010, + SQL_AF_DISTINCT = 0x00000020, + SQL_AF_ALL = 0x00000040, + + All = 0xFF + ], + + SQL_SC = + [ + //None = 0, + + SQL_SC_SQL92_ENTRY = 0x00000001, + SQL_SC_FIPS127_2_TRANSITIONAL = 0x00000002, + SQL_SC_SQL92_INTERMEDIATE = 0x00000004, + SQL_SC_SQL92_FULL = 0x00000008 + ], + + SQL_DL_SQL92 = + [ + SQL_DL_SQL92_DATE = 0x00000001, + SQL_DL_SQL92_TIME = 0x00000002, + SQL_DL_SQL92_TIMESTAMP = 0x00000004, + SQL_DL_SQL92_INTERVAL_YEAR = 0x00000008, + SQL_DL_SQL92_INTERVAL_MONTH = 0x00000010, + SQL_DL_SQL92_INTERVAL_DAY = 0x00000020, + SQL_DL_SQL92_INTERVAL_HOUR = 0x00000040, + SQL_DL_SQL92_INTERVAL_MINUTE = 0x00000080, + SQL_DL_SQL92_INTERVAL_SECOND = 0x00000100, + 
SQL_DL_SQL92_INTERVAL_YEAR_TO_MONTH = 0x00000200, + SQL_DL_SQL92_INTERVAL_DAY_TO_HOUR = 0x00000400, + SQL_DL_SQL92_INTERVAL_DAY_TO_MINUTE = 0x00000800, + SQL_DL_SQL92_INTERVAL_DAY_TO_SECOND = 0x00001000, + SQL_DL_SQL92_INTERVAL_HOUR_TO_MINUTE = 0x00002000, + SQL_DL_SQL92_INTERVAL_HOUR_TO_SECOND = 0x00004000, + SQL_DL_SQL92_INTERVAL_MINUTE_TO_SECOND = 0x00008000 + ], + + SQL_IK = + [ + SQL_IK_NONE = 0x00000000, + SQL_IK_ASC = 0x00000001, + SQL_IK_DESC = 0x00000002, + SQL_IK_ALL = 0x00000003 //SQL_IK_ASC | SQL_IK_DESC + ], + + SQL_ISV = + [ + SQL_ISV_ASSERTIONS = 0x00000001, + SQL_ISV_CHARACTER_SETS = 0x00000002, + SQL_ISV_CHECK_CONSTRAINTS = 0x00000004, + SQL_ISV_COLLATIONS = 0x00000008, + SQL_ISV_COLUMN_DOMAIN_USAGE = 0x00000010, + SQL_ISV_COLUMN_PRIVILEGES = 0x00000020, + SQL_ISV_COLUMNS = 0x00000040, + SQL_ISV_CONSTRAINT_COLUMN_USAGE = 0x00000080, + SQL_ISV_CONSTRAINT_TABLE_USAGE = 0x00000100, + SQL_ISV_DOMAIN_CONSTRAINTS = 0x00000200, + SQL_ISV_DOMAINS = 0x00000400, + SQL_ISV_KEY_COLUMN_USAGE = 0x00000800, + SQL_ISV_REFERENTIAL_CONSTRAINTS = 0x00001000, + SQL_ISV_SCHEMATA = 0x00002000, + SQL_ISV_SQL_LANGUAGES = 0x00004000, + SQL_ISV_TABLE_CONSTRAINTS = 0x00008000, + SQL_ISV_TABLE_PRIVILEGES = 0x00010000, + SQL_ISV_TABLES = 0x00020000, + SQL_ISV_TRANSLATIONS = 0x00040000, + SQL_ISV_USAGE_PRIVILEGES = 0x00080000, + SQL_ISV_VIEW_COLUMN_USAGE = 0x00100000, + SQL_ISV_VIEW_TABLE_USAGE = 0x00200000, + SQL_ISV_VIEWS = 0x00400000 + ], + + SQL_SRJO = + [ + //None = 0, + + SQL_SRJO_CORRESPONDING_CLAUSE = 0x00000001, + SQL_SRJO_CROSS_JOIN = 0x00000002, + SQL_SRJO_EXCEPT_JOIN = 0x00000004, + SQL_SRJO_FULL_OUTER_JOIN = 0x00000008, + SQL_SRJO_INNER_JOIN = 0x00000010, + SQL_SRJO_INTERSECT_JOIN = 0x00000020, + SQL_SRJO_LEFT_OUTER_JOIN = 0x00000040, + SQL_SRJO_NATURAL_JOIN = 0x00000080, + SQL_SRJO_RIGHT_OUTER_JOIN = 0x00000100, + SQL_SRJO_UNION_JOIN = 0x00000200 + ], + + SQL_SRVC = + [ + SQL_SRVC_VALUE_EXPRESSION = 0x00000001, + SQL_SRVC_NULL = 0x00000002, + SQL_SRVC_DEFAULT = 
0x00000004, + SQL_SRVC_ROW_SUBQUERY = 0x00000008 + ], + + //public static readonly int SQL_OV_ODBC3 = 3; + //public const Int32 SQL_NTS = -3; //flags for null-terminated string + + //Pooling + SQL_CP = + [ + OFF = 0, + ONE_PER_DRIVER = 1, + ONE_PER_HENV = 2 + ], + +/* + public const Int32 SQL_CD_TRUE = 1; + public const Int32 SQL_CD_FALSE = 0; + + public const Int32 SQL_DTC_DONE = 0; + public const Int32 SQL_IS_POINTER = -4; + public const Int32 SQL_IS_PTR = 1; +*/ + SQL_DRIVER = + [ + NOPROMPT = 0, + COMPLETE = 1, + PROMPT = 2, + COMPLETE_REQUIRED = 3 + ], + + // Column set for SQLPrimaryKeys + SQL_PRIMARYKEYS = + [ + /* + CATALOGNAME = 1, // TABLE_CAT + SCHEMANAME = 2, // TABLE_SCHEM + TABLENAME = 3, // TABLE_NAME + */ + COLUMNNAME = 4 // COLUMN_NAME + /* + KEY_SEQ = 5, // KEY_SEQ + PKNAME = 6, // PK_NAME + */ + ], + + // Column set for SQLStatistics + SQL_STATISTICS = + [ + /* + CATALOGNAME = 1, // TABLE_CAT + SCHEMANAME = 2, // TABLE_SCHEM + TABLENAME = 3, // TABLE_NAME + NONUNIQUE = 4, // NON_UNIQUE + INDEXQUALIFIER = 5, // INDEX_QUALIFIER + */ + INDEXNAME = 6, // INDEX_NAME + /* + TYPE = 7, // TYPE + */ + ORDINAL_POSITION = 8, // ORDINAL_POSITION + COLUMN_NAME = 9 // COLUMN_NAME + /* + ASC_OR_DESC = 10, // ASC_OR_DESC + CARDINALITY = 11, // CARDINALITY + PAGES = 12, // PAGES + FILTER_CONDITION = 13, // FILTER_CONDITION + */ + ], + + // Column set for SQLSpecialColumns + SQL_SPECIALCOLUMNSET = + [ + /* + SCOPE = 1, // SCOPE + */ + COLUMN_NAME = 2 // COLUMN_NAME + /* + DATA_TYPE = 3, // DATA_TYPE + TYPE_NAME = 4, // TYPE_NAME + COLUMN_SIZE = 5, // COLUMN_SIZE + BUFFER_LENGTH = 6, // BUFFER_LENGTH + DECIMAL_DIGITS = 7, // DECIMAL_DIGITS + PSEUDO_COLUMN = 8, // PSEUDO_COLUMN + */ + ], + + SQL_DIAG = + [ + CURSOR_ROW_COUNT= -1249, + ROW_NUMBER = -1248, + COLUMN_NUMBER = -1247, + RETURNCODE = 1, + NUMBER = 2, + ROW_COUNT = 3, + SQLSTATE = 4, + NATIVE = 5, + MESSAGE_TEXT = 6, + DYNAMIC_FUNCTION = 7, + CLASS_ORIGIN = 8, + SUBCLASS_ORIGIN = 9, + CONNECTION_NAME = 10, 
+ SERVER_NAME = 11, + DYNAMIC_FUNCTION_CODE = 12 + ], + + SQL_SU = + [ + SQL_SU_DML_STATEMENTS = 0x00000001, + SQL_SU_PROCEDURE_INVOCATION = 0x00000002, + SQL_SU_TABLE_DEFINITION = 0x00000004, + SQL_SU_INDEX_DEFINITION = 0x00000008, + SQL_SU_PRIVILEGE_DEFINITION = 0x00000010 + ] +] diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.mproj b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.mproj new file mode 100644 index 0000000000..543b5652c8 --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.mproj @@ -0,0 +1,123 @@ + + + Debug + 2.0 + {75a2dabe-6c5b-498e-8df1-e85d4483a7dc} + Exe + MyRootNamespace + MyAssemblyName + False + False + False + False + False + False + False + False + False + False + 1000 + Yes + OdfeSqlOdbcPBIConnector + + + false + + bin\Debug\ + + + false + bin\Release\ + + + + + + + + + + Code + + + Code + + + Code + + + Code + + + Code + + + Code + + + Code + + + Code + + + Code + + + Code + + + Content + + + Content + + + Code + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq new file mode 100644 index 0000000000..f1af5f74b6 --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.pq @@ -0,0 +1,233 @@ +// This file contains Data Connector logic +section OdfeSqlOdbcPBIConnector; + +// When set to true, additional trace information will be written out to the User log. +// This should be set to false before release. Tracing is done through a call to +// Diagnostics.LogValue(). When EnableTraceOutput is set to false, the call becomes a +// no-op and simply returns the original value. 
+EnableTraceOutput = true; + +[DataSource.Kind="OdfeSqlOdbcPBIConnector", Publish="OdfeSqlOdbcPBIConnector.Publish"] +shared OdfeSqlOdbcPBIConnector.Contents = Value.ReplaceType(OdfeSqlOdbcPBIConnectorImpl, OdfeSqlOdbcPBIConnectorType); + +// Wrapper function to provide additional UI customization. +OdfeSqlOdbcPBIConnectorType = type function ( + Host as (type text meta [ + Documentation.FieldCaption = "Host", + Documentation.FieldDescription = "The hostname of the Open Distro For Elasticsearch server.", + Documentation.SampleValues = { "localhost" } + ]), + optional Port as (type number meta [ + Documentation.FieldCaption = "Port", + Documentation.FieldDescription = "The port of the Open Distro For Elasticsearch server is running on.", + Documentation.SampleValues = { 9200 } + ]) + ) + as table meta [ + Documentation.Name = "Open Distro For Elasticsearch" + ]; + +OdfeSqlOdbcPBIConnectorImpl = (Host as text, optional Port as number) as table => + let + Credential = Extension.CurrentCredential(), + AuthenticationMode = Credential[AuthenticationKind], + + // Sets connection string properties for authentication. + CredentialConnectionString = + if AuthenticationMode = "UsernamePassword" then + [ + Auth = "BASIC", + UID = Credential[Username], + PWD = Credential[Password] + ] + else if AuthenticationMode = "Key" then + [ + Auth = "AWS_SIGV4", + Region = Credential[Key] + ] + else + [ + Auth = "NONE" + ], + + // Sets connection string properties for encrypted connections. + EncryptedConnectionString = + if Credential[EncryptConnection] = null or Credential[EncryptConnection] = true then + [ + UseSSL = 1 + ] + else + [ + UseSSL = 0 + ], + + // Set host & port in connection string. + // Do not include port in connection string for aws server connection. 
+ Server = + if Port <> null then + [ + Host = Host, + Port = Port + ] + else + [ + Host = Host + ], + + ConnectionString = [ + Driver = "ODFE SQL ODBC Driver" + ], + + SQLGetTypeInfo = (types) => + if (EnableTraceOutput <> true) then types else + let + // Outputting the entire table might be too large, and result in the value being truncated. + // We can output a row at a time instead with Table.TransformRows() + rows = Table.TransformRows(types, each Diagnostics.LogValue("SQLGetTypeInfo " & _[TYPE_NAME], _)), + toTable = Table.FromRecords(rows) + in + Value.ReplaceType(toTable, Value.Type(types)), + + // SQLColumns is a function handler that receives the results of an ODBC call to SQLColumns(). + SQLColumns = (catalogName, schemaName, tableName, columnName, source) => + if (EnableTraceOutput <> true) then source else + // the if statement conditions will force the values to evaluated/written to diagnostics + if (Diagnostics.LogValue("SQLColumns.TableName", tableName) <> "***" and Diagnostics.LogValue("SQLColumns.ColumnName", columnName) <> "***") then + let + // Outputting the entire table might be too large, and result in the value being truncated. + // We can output a row at a time instead with Table.TransformRows() + rows = Table.TransformRows(source, each Diagnostics.LogValue("SQLColumns", _)), + toTable = Table.FromRecords(rows) + in + Value.ReplaceType(toTable, Value.Type(source)) + else + source, + + // Add support for `LIMIT` and `OFFSET` clauses (rather than `TOP`) + AstVisitor = [ + // format is "LIMIT [,]" - ex. LIMIT 2,10 or LIMIT 10 + LimitClause = (skip, take) => + if (take = null) then + ... 
+ else + let + skip = + if (skip = null or skip = 0) then + "" + else + Number.ToText(skip) & "," + in + [ + Text = Text.Format("LIMIT #{0}#{1}", { skip, take }), + Location = "AfterQuerySpecification" + ] + ], + + OdbcDatasource = Odbc.DataSource(ConnectionString & Server & CredentialConnectionString & EncryptedConnectionString, [ + // Do not view the tables grouped by their schema names. + HierarchicalNavigation = false, + // Prevents execution of native SQL statements. Extensions should set this to true. + HideNativeQuery = true, + // Allows upconversion of numeric types + SoftNumbers = true, + // Allow upconversion / resizing of numeric and string types + TolerateConcatOverflow = true, + // Enables connection pooling via the system ODBC manager + ClientConnectionPooling = true, + + // These values should be set by previous steps + AstVisitor = AstVisitor, + SQLColumns = SQLColumns, + SQLGetTypeInfo = SQLGetTypeInfo, + + OnError = OnOdbcError, + + // Connection string properties used for encrypted connections. + CredentialConnectionString = EncryptedConnectionString + ]) + in + OdbcDatasource; + +// Handles ODBC errors. +OnOdbcError = (errorRecord as record) => + let + ErrorMessage = errorRecord[Message], + ConnectionHostPort = errorRecord[Detail][DataSourcePath], + + IsDriverNotInstalled = Text.Contains(ErrorMessage, "doesn't correspond to an installed ODBC driver"), + + OdbcError = errorRecord[Detail][OdbcErrors]{0}, + OdbcErrorCode = OdbcError[NativeError], + + // Failed to connect to given host + IsHostUnreachable = + OdbcErrorCode = 202 + in + if IsDriverNotInstalled then + error Error.Record("", "The Open Distro For Elasticsearch SQL ODBC driver is not installed. Please install the driver") + else if IsHostUnreachable then + error Error.Record("", "Couldn't reach server. Please double-check the host, port and auth.") + else + error errorRecord; + +// Data Source Kind description +OdfeSqlOdbcPBIConnector = [ + // Required for use with Power BI Service. 
+ TestConnection = (dataSourcePath) => + let + json = Json.Document(dataSourcePath), + Host = json[Host], + Port = json[Port] + in + { "OdfeSqlOdbcPBIConnector.Contents", Host, Port }, + + // Authentication modes + Authentication = [ + Implicit = [ + Label = "NONE" + ], + UsernamePassword = [ + Label = "BASIC" + ], + Key = [ + Label = "AWS_SIGV4", + KeyLabel = "Region" + ] + ], + + // Enable Encryption + SupportsEncryption = true, + + Label = Extension.LoadString("DataSourceLabel") +]; + +// Data Source UI publishing description +OdfeSqlOdbcPBIConnector.Publish = [ + Beta = true, + Category = "Other", + ButtonText = { Extension.LoadString("ButtonTitle"), Extension.LoadString("ButtonHelp") }, + LearnMoreUrl = "https://opendistro.github.io/for-elasticsearch/", + SupportsDirectQuery = true, + SourceImage = OdfeSqlOdbcPBIConnector.Icons, + SourceTypeImage = OdfeSqlOdbcPBIConnector.Icons +]; + +OdfeSqlOdbcPBIConnector.Icons = [ + Icon16 = { Extension.Contents("OdfeSqlOdbcPBIConnector16.png"), Extension.Contents("OdfeSqlOdbcPBIConnector20.png"), Extension.Contents("OdfeSqlOdbcPBIConnector24.png"), Extension.Contents("OdfeSqlOdbcPBIConnector32.png") }, + Icon32 = { Extension.Contents("OdfeSqlOdbcPBIConnector32.png"), Extension.Contents("OdfeSqlOdbcPBIConnector40.png"), Extension.Contents("OdfeSqlOdbcPBIConnector48.png"), Extension.Contents("OdfeSqlOdbcPBIConnector64.png") } +]; + +// Load common library functions +Extension.LoadFunction = (name as text) => + let + binary = Extension.Contents(name), + asText = Text.FromBinary(binary) + in + Expression.Evaluate(asText, #shared); + +// Diagnostics module contains multiple functions. . +Diagnostics = Extension.LoadFunction("Diagnostics.pqm"); +Diagnostics.LogValue = if (EnableTraceOutput) then Diagnostics[LogValue] else (prefix, value) => value; + +// OdbcConstants contains numeric constants from the ODBC header files, and helper function to create bitfield values. 
+ODBC = Extension.LoadFunction("OdbcConstants.pqm"); diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.query.pq b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.query.pq new file mode 100644 index 0000000000..def55af3e2 --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector.query.pq @@ -0,0 +1,310 @@ +// This file contains queries to test your data connector +section OdfeSqlOdbcPBIConnector.UnitTests; + +shared MyExtension.UnitTest = +[ + // Common variables for all tests + Host = "localhost", + Port = 9200, + + facts = + { + Fact("Connection Test", + 7, + let + Source = OdfeSqlOdbcPBIConnector.Contents(Host,Port), + no_of_columns = Table.ColumnCount(Source) + in + no_of_columns + ) + }, + + report = Facts.Summarize(facts) +][report]; + +/// COMMON UNIT TESTING CODE +Fact = (_subject as text, _expected, _actual) as record => +[ expected = try _expected, + safeExpected = if expected[HasError] then "Expected : "& @ValueToText(expected[Error]) else expected[Value], + actual = try _actual, + safeActual = if actual[HasError] then "Actual : "& @ValueToText(actual[Error]) else actual[Value], + attempt = try safeExpected = safeActual, + result = if attempt[HasError] or not attempt[Value] then "Failure ⛔" else "Success ✓", + resultOp = if result = "Success ✓" then " = " else " <> ", + addendumEvalAttempt = if attempt[HasError] then @ValueToText(attempt[Error]) else "", + addendumEvalExpected = try @ValueToText(safeExpected) otherwise "...", + addendumEvalActual = try @ValueToText (safeActual) otherwise "...", + fact = + [ Result = result &" "& addendumEvalAttempt, + Notes =_subject, + Details = " ("& addendumEvalExpected & resultOp & addendumEvalActual &")" + ] +][fact]; + +Facts = (_subject as text, _predicates as list) => List.Transform(_predicates, each Fact(_subject,_{0},_{1})); + +Facts.Summarize = (_facts as list) as table => +[ Fact.CountSuccesses = (count, i) => + [ result = try i[Result], + sum = if result[HasError] or not 
Text.StartsWith(result[Value], "Success") then count else count + 1 + ][sum], + passed = List.Accumulate(_facts, 0, Fact.CountSuccesses), + total = List.Count(_facts), + format = if passed = total then "All #{0} Passed !!! ✓" else "#{0} Passed ☺ #{1} Failed ☹", + result = if passed = total then "Success" else "⛔", + rate = Number.IntegerDivide(100*passed, total), + header = + [ Result = result, + Notes = Text.Format(format, {passed, total-passed}), + Details = Text.Format("#{0}% success rate", {rate}) + ], + report = Table.FromRecords(List.Combine({{header},_facts})) +][report]; + +ValueToText = (value, optional depth) => + let + _canBeIdentifier = (x) => + let + keywords = {"and", "as", "each", "else", "error", "false", "if", "in", "is", "let", "meta", "not", "otherwise", "or", "section", "shared", "then", "true", "try", "type" }, + charAlpha = (c as number) => (c>= 65 and c <= 90) or (c>= 97 and c <= 122) or c=95, + charDigit = (c as number) => c>= 48 and c <= 57 + in + try + charAlpha(Character.ToNumber(Text.At(x,0))) + and + List.MatchesAll( + Text.ToList(x), + (c)=> let num = Character.ToNumber(c) in charAlpha(num) or charDigit(num) + ) + and not + List.MatchesAny( keywords, (li)=> li=x ) + otherwise + false, + + Serialize.Binary = (x) => "#binary(" & Serialize(Binary.ToList(x)) & ") ", + + Serialize.Date = (x) => "#date(" & + Text.From(Date.Year(x)) & ", " & + Text.From(Date.Month(x)) & ", " & + Text.From(Date.Day(x)) & ") ", + + Serialize.Datetime = (x) => "#datetime(" & + Text.From(Date.Year(DateTime.Date(x))) & ", " & + Text.From(Date.Month(DateTime.Date(x))) & ", " & + Text.From(Date.Day(DateTime.Date(x))) & ", " & + Text.From(Time.Hour(DateTime.Time(x))) & ", " & + Text.From(Time.Minute(DateTime.Time(x))) & ", " & + Text.From(Time.Second(DateTime.Time(x))) & ") ", + + Serialize.Datetimezone =(x) => let + dtz = DateTimeZone.ToRecord(x) + in + "#datetimezone(" & + Text.From(dtz[Year]) & ", " & + Text.From(dtz[Month]) & ", " & + Text.From(dtz[Day]) & ", " & 
+ Text.From(dtz[Hour]) & ", " & + Text.From(dtz[Minute]) & ", " & + Text.From(dtz[Second]) & ", " & + Text.From(dtz[ZoneHours]) & ", " & + Text.From(dtz[ZoneMinutes]) & ") ", + + Serialize.Duration = (x) => let + dur = Duration.ToRecord(x) + in + "#duration(" & + Text.From(dur[Days]) & ", " & + Text.From(dur[Hours]) & ", " & + Text.From(dur[Minutes]) & ", " & + Text.From(dur[Seconds]) & ") ", + + Serialize.Function = (x) => _serialize_function_param_type( + Type.FunctionParameters(Value.Type(x)), + Type.FunctionRequiredParameters(Value.Type(x)) ) & + " as " & + _serialize_function_return_type(Value.Type(x)) & + " => (...) ", + + Serialize.List = (x) => "{" & + List.Accumulate(x, "", (seed,item) => if seed="" then Serialize(item) else seed & ", " & Serialize(item)) & + "} ", + + Serialize.Logical = (x) => Text.From(x), + + Serialize.Null = (x) => "null", + + Serialize.Number = (x) => + let Text.From = (i as number) as text => + if Number.IsNaN(i) then "#nan" else + if i=Number.PositiveInfinity then "#infinity" else + if i=Number.NegativeInfinity then "-#infinity" else + Text.From(i) + in + Text.From(x), + + Serialize.Record = (x) => "[ " & + List.Accumulate( + Record.FieldNames(x), + "", + (seed,item) => + (if seed="" then Serialize.Identifier(item) else seed & ", " & Serialize.Identifier(item)) & " = " & Serialize(Record.Field(x, item)) + ) & + " ] ", + + Serialize.Table = (x) => "#table( type " & + _serialize_table_type(Value.Type(x)) & + ", " & + Serialize(Table.ToRows(x)) & + ") ", + + Serialize.Text = (x) => """" & + _serialize_text_content(x) & + """", + + _serialize_text_content = (x) => let + escapeText = (n as number) as text => "#(#)(" & Text.PadStart(Number.ToText(n, "X", "en-US"), 4, "0") & ")" + in + List.Accumulate( + List.Transform( + Text.ToList(x), + (c) => let n=Character.ToNumber(c) in + if n = 9 then "#(#)(tab)" else + if n = 10 then "#(#)(lf)" else + if n = 13 then "#(#)(cr)" else + if n = 34 then """""" else + if n = 35 then "#(#)(#)" else + if 
n < 32 then escapeText(n) else + if n < 127 then Character.FromNumber(n) else + escapeText(n) + ), + "", + (s,i)=>s&i + ), + + Serialize.Identifier = (x) => + if _canBeIdentifier(x) then + x + else + "#""" & + _serialize_text_content(x) & + """", + + Serialize.Time = (x) => "#time(" & + Text.From(Time.Hour(x)) & ", " & + Text.From(Time.Minute(x)) & ", " & + Text.From(Time.Second(x)) & ") ", + + Serialize.Type = (x) => "type " & _serialize_typename(x), + + + _serialize_typename = (x, optional funtype as logical) => /* Optional parameter: Is this being used as part of a function signature? */ + let + isFunctionType = (x as type) => try if Type.FunctionReturn(x) is type then true else false otherwise false, + isTableType = (x as type) => try if Type.TableSchema(x) is table then true else false otherwise false, + isRecordType = (x as type) => try if Type.ClosedRecord(x) is type then true else false otherwise false, + isListType = (x as type) => try if Type.ListItem(x) is type then true else false otherwise false + in + + if funtype=null and isTableType(x) then _serialize_table_type(x) else + if funtype=null and isListType(x) then "{ " & @_serialize_typename( Type.ListItem(x) ) & " }" else + if funtype=null and isFunctionType(x) then "function " & _serialize_function_type(x) else + if funtype=null and isRecordType(x) then _serialize_record_type(x) else + + if x = type any then "any" else + let base = Type.NonNullable(x) in + (if Type.IsNullable(x) then "nullable " else "") & + (if base = type anynonnull then "anynonnull" else + if base = type binary then "binary" else + if base = type date then "date" else + if base = type datetime then "datetime" else + if base = type datetimezone then "datetimezone" else + if base = type duration then "duration" else + if base = type logical then "logical" else + if base = type none then "none" else + if base = type null then "null" else + if base = type number then "number" else + if base = type text then "text" else + if base = type 
time then "time" else + if base = type type then "type" else + + /* Abstract types: */ + if base = type function then "function" else + if base = type table then "table" else + if base = type record then "record" else + if base = type list then "list" else + + "any /*Actually unknown type*/"), + + _serialize_table_type = (x) => + let + schema = Type.TableSchema(x) + in + "table " & + (if Table.IsEmpty(schema) then "" else + "[" & List.Accumulate( + List.Transform( + Table.ToRecords(Table.Sort(schema,"Position")), + each Serialize.Identifier(_[Name]) & " = " & _[Kind]), + "", + (seed,item) => (if seed="" then item else seed & ", " & item ) + ) & "] " ), + + _serialize_record_type = (x) => + let flds = Type.RecordFields(x) + in + if Record.FieldCount(flds)=0 then "record" else + "[" & List.Accumulate( + Record.FieldNames(flds), + "", + (seed,item) => + seed & + (if seed<>"" then ", " else "") & + (Serialize.Identifier(item) & "=" & _serialize_typename(Record.Field(flds,item)[Type]) ) + ) & + (if Type.IsOpenRecord(x) then ",..." 
else "") & + "]", + + _serialize_function_type = (x) => _serialize_function_param_type( + Type.FunctionParameters(x), + Type.FunctionRequiredParameters(x) ) & + " as " & + _serialize_function_return_type(x), + + _serialize_function_param_type = (t,n) => + let + funsig = Table.ToRecords( + Table.TransformColumns( + Table.AddIndexColumn( Record.ToTable( t ), "isOptional", 1 ), + { "isOptional", (x)=> x>n } ) ) + in + "(" & + List.Accumulate( + funsig, + "", + (seed,item)=> + (if seed="" then "" else seed & ", ") & + (if item[isOptional] then "optional " else "") & + Serialize.Identifier(item[Name]) & " as " & _serialize_typename(item[Value], true) ) + & ")", + + _serialize_function_return_type = (x) => _serialize_typename(Type.FunctionReturn(x), true), + + Serialize = (x) as text => + if x is binary then try Serialize.Binary(x) otherwise "null /*serialize failed*/" else + if x is date then try Serialize.Date(x) otherwise "null /*serialize failed*/" else + if x is datetime then try Serialize.Datetime(x) otherwise "null /*serialize failed*/" else + if x is datetimezone then try Serialize.Datetimezone(x) otherwise "null /*serialize failed*/" else + if x is duration then try Serialize.Duration(x) otherwise "null /*serialize failed*/" else + if x is function then try Serialize.Function(x) otherwise "null /*serialize failed*/" else + if x is list then try Serialize.List(x) otherwise "null /*serialize failed*/" else + if x is logical then try Serialize.Logical(x) otherwise "null /*serialize failed*/" else + if x is null then try Serialize.Null(x) otherwise "null /*serialize failed*/" else + if x is number then try Serialize.Number(x) otherwise "null /*serialize failed*/" else + if x is record then try Serialize.Record(x) otherwise "null /*serialize failed*/" else + if x is table then try Serialize.Table(x) otherwise "null /*serialize failed*/" else + if x is text then try Serialize.Text(x) otherwise "null /*serialize failed*/" else + if x is time then try Serialize.Time(x) 
otherwise "null /*serialize failed*/" else + if x is type then try Serialize.Type(x) otherwise "null /*serialize failed*/" else + "[#_unable_to_serialize_#]" + in + try Serialize(value) otherwise ""; diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector16.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector16.png new file mode 100644 index 0000000000..95578aba51 Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector16.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector20.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector20.png new file mode 100644 index 0000000000..cdcca25637 Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector20.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector24.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector24.png new file mode 100644 index 0000000000..81e49796f4 Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector24.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector32.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector32.png new file mode 100644 index 0000000000..9896675643 Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector32.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector40.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector40.png new file mode 100644 index 0000000000..90d4b584ba Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector40.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector48.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector48.png new file mode 100644 index 0000000000..6c763dd408 Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector48.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector64.png 
b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector64.png new file mode 100644 index 0000000000..4450a97f3d Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector64.png differ diff --git a/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector80.png b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector80.png new file mode 100644 index 0000000000..670894f23b Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/OdfeSqlOdbcPBIConnector80.png differ diff --git a/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez b/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez new file mode 100644 index 0000000000..dc5ae9d8ab Binary files /dev/null and b/sql-odbc/src/PowerBIConnector/bin/Release/OdfeSqlOdbcPBIConnector.mez differ diff --git a/sql-odbc/src/PowerBIConnector/resources.resx b/sql-odbc/src/PowerBIConnector/resources.resx new file mode 100644 index 0000000000..8fe822c55c --- /dev/null +++ b/sql-odbc/src/PowerBIConnector/resources.resx @@ -0,0 +1,129 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + Connect to Open Distro For Elasticsearch + + + Open Distro For Elasticsearch + + + Open Distro For Elasticsearch + + \ No newline at end of file diff --git a/sql-odbc/src/TableauConnector/odfe_sql_odbc/connection-dialog.tcd b/sql-odbc/src/TableauConnector/odfe_sql_odbc/connection-dialog.tcd index 24c364c615..2ac573f598 100644 --- a/sql-odbc/src/TableauConnector/odfe_sql_odbc/connection-dialog.tcd +++ b/sql-odbc/src/TableauConnector/odfe_sql_odbc/connection-dialog.tcd @@ -10,5 +10,7 @@ + + diff --git 
a/sql-odbc/src/TableauConnector/odfe_sql_odbc/manifest.xml b/sql-odbc/src/TableauConnector/odfe_sql_odbc/manifest.xml index 5679e004a9..6afbc42514 100644 --- a/sql-odbc/src/TableauConnector/odfe_sql_odbc/manifest.xml +++ b/sql-odbc/src/TableauConnector/odfe_sql_odbc/manifest.xml @@ -1,10 +1,10 @@ - + - - + + diff --git a/sql-odbc/src/TableauConnector/odfe_sql_odbc/odfe_sql_odbc.taco b/sql-odbc/src/TableauConnector/odfe_sql_odbc/odfe_sql_odbc.taco index 2f3e1edfa2..b90cd86d3c 100644 Binary files a/sql-odbc/src/TableauConnector/odfe_sql_odbc/odfe_sql_odbc.taco and b/sql-odbc/src/TableauConnector/odfe_sql_odbc/odfe_sql_odbc.taco differ diff --git a/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/connection-dialog.tcd b/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/connection-dialog.tcd index 20560b8d15..0f936b3861 100644 --- a/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/connection-dialog.tcd +++ b/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/connection-dialog.tcd @@ -10,5 +10,7 @@ + + diff --git a/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/manifest.xml b/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/manifest.xml index a4d2d82b04..df0726b637 100644 --- a/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/manifest.xml +++ b/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/manifest.xml @@ -1,10 +1,10 @@ - + - - + + diff --git a/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/odfe_sql_odbc_dev.taco b/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/odfe_sql_odbc_dev.taco new file mode 100644 index 0000000000..1eb7797c04 Binary files /dev/null and b/sql-odbc/src/TableauConnector/odfe_sql_odbc_dev/odfe_sql_odbc_dev.taco differ diff --git a/sql-odbc/src/installer/Resources/README.txt b/sql-odbc/src/installer/Resources/README.txt index 65afeb49ee..c311b0cbbc 100644 --- a/sql-odbc/src/installer/Resources/README.txt +++ b/sql-odbc/src/installer/Resources/README.txt @@ -1,10 +1,10 @@ -All files are available in '/usr/local/lib/odfe-sql-odbc' after installation. 
+All files are available in '/Library/ODBC/odfe-sql-odbc' after installation. To setup a connection, you can use DSN to store your data source connection information, 1. Open 'iODBC Data Source Administrator'. 2. Go to 'User DSN'. 3. Select 'ODFE SQL ODBC DSN' and click on 'Configure'. -4. Update the connection string values. For the list of all supported options, check '/usr/local/lib/odfe-sql-odbc/doc/README.md'. +4. Update the connection string values. For the list of all supported options, check '/Library/ODBC/odfe-sql-odbc/doc/README.md'. 5. Click 'Ok' to save changes. If using with ODBC compatible BI tools, refer to the tool documentation on configuring a new ODBC driver. The typical requirement is to make the tool aware of the location of the driver library file and then use it to setup database (i.e Elasticsearch) connections. @@ -16,4 +16,4 @@ For example, if you want to use Tableau with Elasticsearch Server, 4. Click on 'Connect'. All connection attributes will be retrived. 5. Click on 'Sign In'. You will be successfully connected to elasticsearch server. -For more details, check 'https://github.com/opendistro-for-elasticsearch/sql-odbc'. \ No newline at end of file +For more details, check 'https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc'. 
\ No newline at end of file diff --git a/sql-odbc/src/installer/postinstall b/sql-odbc/src/installer/postinstall index aed11b42c0..d1205cd15b 100644 --- a/sql-odbc/src/installer/postinstall +++ b/sql-odbc/src/installer/postinstall @@ -1,7 +1,7 @@ #!/bin/bash PKG_INSTALL_DIR=/Applications -FINAL_INSTALL_DIR=/usr/local/lib/odfe-sql-odbc +FINAL_INSTALL_DIR=/Library/ODBC/odfe-sql-odbc # Remove install directory if it already exists if [ -d "${FINAL_INSTALL_DIR}" ]; then diff --git a/sql-odbc/src/odfesqlodbc/connection.c b/sql-odbc/src/odfesqlodbc/connection.c index afb8cdc1d5..4c151ac3b6 100644 --- a/sql-odbc/src/odfesqlodbc/connection.c +++ b/sql-odbc/src/odfesqlodbc/connection.c @@ -636,26 +636,26 @@ void CC_log_error(const char *func, const char *desc, #define NULLCHECK(a) (a ? a : "(NULL)") if (self) { - MYLOG(ES_ERROR, "CONN ERROR: func=%s, desc='%s', errnum=%d, errmsg='%s'\n", - func, desc, self->__error_number, - NULLCHECK(self->__error_message)); + MYLOG(ES_ERROR, + "CONN ERROR: func=%s, desc='%s', errnum=%d, errmsg='%s'\n", func, + desc, self->__error_number, NULLCHECK(self->__error_message)); MYLOG(ES_ERROR, " " "------------------------------------------------------------\n"); MYLOG(ES_ERROR, " henv=%p, conn=%p, status=%u, num_stmts=%d\n", self->henv, self, self->status, self->num_stmts); - MYLOG(ES_ERROR, - " esconn=%p, stmts=%p, lobj_type=%d\n", self->esconn, - self->stmts, self->lobj_type); + MYLOG(ES_ERROR, " esconn=%p, stmts=%p, lobj_type=%d\n", + self->esconn, self->stmts, self->lobj_type); } else { - MYLOG(ES_ERROR, "INVALID CONNECTION HANDLE ERROR: func=%s, desc='%s'\n", func, - desc); + MYLOG(ES_ERROR, "INVALID CONNECTION HANDLE ERROR: func=%s, desc='%s'\n", + func, desc); } } const char *CurrCat(const ConnectionClass *conn) { - return conn->cluster_name; + UNUSED(conn); + return NULL; } const char *CurrCatString(const ConnectionClass *conn) { diff --git a/sql-odbc/src/odfesqlodbc/es_communication.cpp b/sql-odbc/src/odfesqlodbc/es_communication.cpp 
index cf25b070a7..b18af24f33 100644 --- a/sql-odbc/src/odfesqlodbc/es_communication.cpp +++ b/sql-odbc/src/odfesqlodbc/es_communication.cpp @@ -40,6 +40,8 @@ static const std::string OPENDISTRO_SQL_PLUGIN_NAME = "opendistro_sql"; static const std::string ALLOCATION_TAG = "AWS_SIGV4_AUTH"; static const std::string SERVICE_NAME = "es"; static const std::string ESODBC_PROFILE_NAME = "elasticsearchodbc"; +static const std::string ERROR_MSG_PREFIX = + "[Open Distro For Elasticsearch][SQL ODBC Driver][SQL Plugin] "; static const std::string JSON_SCHEMA = "{" // This was generated from the example elasticsearch data "\"type\": \"object\"," @@ -79,6 +81,33 @@ static const std::string CURSOR_JSON_SCHEMA = "}," "\"required\": [\"datarows\"]" "}"; +static const std::string ERROR_RESPONSE_SCHEMA = R"EOF( +{ + "type": "object", + "properties": { + "error": { + "type": "object", + "properties": { + "reason": { "type": "string" }, + "details": { "type": "string" }, + "type": { "type": "string" } + }, + "required": [ + "reason", + "details", + "type" + ] + }, + "status": { + "type": "integer" + } + }, + "required": [ + "error", + "status" + ] +} +)EOF"; void ESCommunication::AwsHttpResponseToString( std::shared_ptr< Aws::Http::HttpResponse > response, std::string& output) { @@ -118,6 +147,32 @@ void ESCommunication::PrepareCursorResult(ESResult& es_result) { } } +std::shared_ptr< ErrorDetails > ESCommunication::ParseErrorResponse( + ESResult& es_result) { + // Prepare document and validate schema + try { + LogMsg(ES_DEBUG, "Parsing error response (with schema validation)"); + es_result.es_result_doc.parse(es_result.result_json, + ERROR_RESPONSE_SCHEMA); + + auto error_details = std::make_shared< ErrorDetails >(); + error_details->reason = + es_result.es_result_doc["error"]["reason"].as_string(); + error_details->details = + es_result.es_result_doc["error"]["details"].as_string(); + error_details->source_type = + es_result.es_result_doc["error"]["type"].as_string(); + return 
error_details; + } catch (const rabbit::parse_error& e) { + // The exception rabbit gives is quite useless - providing the json + // will aid debugging for users + std::string str = "Exception obtained '" + std::string(e.what()) + + "' when parsing json string '" + + es_result.result_json + "'."; + throw std::runtime_error(str.c_str()); + } +} + void ESCommunication::GetJsonSchema(ESResult& es_result) { // Prepare document and validate schema try { @@ -139,6 +194,7 @@ ESCommunication::ESCommunication() #pragma clang diagnostic ignored "-Wreorder" #endif // __APPLE__ : m_status(ConnStatusType::CONNECTION_BAD), + m_error_type(ConnErrorType::CONN_ERROR_SUCCESS), m_valid_connection_options(false), m_is_retrieving(false), m_error_message(""), @@ -159,7 +215,14 @@ ESCommunication::~ESCommunication() { std::string ESCommunication::GetErrorMessage() { // TODO #35 - Check if they expect NULL or "" when there is no error. - return m_error_message; + m_error_details->details = std::regex_replace(m_error_details->details, + std::regex("\\n"), "\\\\n"); + return ERROR_MSG_PREFIX + m_error_details->reason + ": " + + m_error_details->details; +} + +ConnErrorType ESCommunication::GetErrorType() { + return m_error_type; } bool ESCommunication::ConnectionOptions(runtime_options& rt_opts, @@ -180,6 +243,7 @@ bool ESCommunication::ConnectDBStart() { LogMsg(ES_ALL, "Starting DB connection."); m_status = ConnStatusType::CONNECTION_BAD; if (!m_valid_connection_options) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Invalid connection options, unable to connect to DB."; LogMsg(ES_ERROR, m_error_message.c_str()); @@ -189,6 +253,7 @@ bool ESCommunication::ConnectDBStart() { m_status = ConnStatusType::CONNECTION_NEEDED; if (!EstablishConnection()) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Failed to establish connection to DB."; LogMsg(ES_ERROR, m_error_message.c_str()); DropDBConnection(); @@ -222,14 +287,17 @@ bool 
ESCommunication::CheckConnectionOptions() { if (m_rt_opts.auth.auth_type == AUTHTYPE_BASIC) { if (m_rt_opts.auth.username.empty() || m_rt_opts.auth.password.empty()) { + m_error_type = ConnErrorType::CONN_ERROR_INVALID_AUTH; m_error_message = AUTHTYPE_BASIC " authentication requires a username and password."; } } else { + m_error_type = ConnErrorType::CONN_ERROR_INVALID_AUTH; m_error_message = "Unknown authentication type: '" + m_rt_opts.auth.auth_type + "'"; } } else if (m_rt_opts.conn.server == "") { + m_error_type = ConnErrorType::CONN_ERROR_UNABLE_TO_ESTABLISH; m_error_message = "Host connection option was not specified."; } @@ -309,9 +377,9 @@ std::shared_ptr< Aws::Http::HttpResponse > ESCommunication::IssueRequest( request->SetAuthorization("Basic " + hashed_userpw); } else if (m_rt_opts.auth.auth_type == AUTHTYPE_IAM) { std::shared_ptr< Aws::Auth::ProfileConfigFileAWSCredentialsProvider > - credential_provider = - Aws::MakeShared< Aws::Auth::ProfileConfigFileAWSCredentialsProvider >( - ALLOCATION_TAG.c_str(), ESODBC_PROFILE_NAME.c_str()); + credential_provider = Aws::MakeShared< + Aws::Auth::ProfileConfigFileAWSCredentialsProvider >( + ALLOCATION_TAG.c_str(), ESODBC_PROFILE_NAME.c_str()); Aws::Client::AWSAuthV4Signer signer(credential_provider, SERVICE_NAME.c_str(), m_rt_opts.auth.region.c_str()); @@ -340,6 +408,7 @@ bool ESCommunication::IsSQLPluginInstalled(const std::string& plugin_response) { return true; } } else { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Could not find all necessary fields in the plugin " "response object. 
" @@ -348,15 +417,19 @@ bool ESCommunication::IsSQLPluginInstalled(const std::string& plugin_response) { } } } catch (const rabbit::type_mismatch& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing endpoint response: " + std::string(e.what()); } catch (const rabbit::parse_error& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing endpoint response: " + std::string(e.what()); } catch (const std::exception& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing endpoint response: " + std::string(e.what()); } catch (...) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Unknown exception thrown when parsing plugin endpoint response."; } @@ -379,12 +452,14 @@ bool ESCommunication::EstablishConnection() { IssueRequest(PLUGIN_ENDPOINT_FORMAT_JSON, Aws::Http::HttpMethod::HTTP_GET, "", "", ""); if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "The SQL plugin must be installed in order to use this driver. " "Received NULL response."; } else { AwsHttpResponseToString(response, m_response_str); if (response->GetResponseCode() != Aws::Http::HttpResponseCode::OK) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "The SQL plugin must be installed in order to use this driver."; if (response->HasClientError()) @@ -396,6 +471,7 @@ bool ESCommunication::EstablishConnection() { if (IsSQLPluginInstalled(m_response_str)) { return true; } else { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "The SQL plugin must be installed in order to use this " "driver. 
Response body: '" @@ -407,12 +483,79 @@ bool ESCommunication::EstablishConnection() { return false; } +std::vector< std::string > ESCommunication::GetColumnsWithSelectQuery( + const std::string table_name) { + std::vector< std::string > list_of_column; + if (table_name.empty()) { + m_error_type = ConnErrorType::CONN_ERROR_INVALID_NULL_PTR; + m_error_message = "Query is NULL"; + LogMsg(ES_ERROR, m_error_message.c_str()); + return list_of_column; + } + + // Prepare query + std::string query = "SELECT * FROM " + table_name + " LIMIT 0"; + std::string msg = "Attempting to execute a query \"" + query + "\""; + LogMsg(ES_DEBUG, msg.c_str()); + + // Issue request + std::shared_ptr< Aws::Http::HttpResponse > response = + IssueRequest(SQL_ENDPOINT_FORMAT_JDBC, Aws::Http::HttpMethod::HTTP_POST, + ctype, query); + + // Validate response + if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; + m_error_message = + "Failed to receive response from query. " + "Received NULL response."; + LogMsg(ES_ERROR, m_error_message.c_str()); + return list_of_column; + } + + // Convert body from Aws IOStream to string + std::unique_ptr< ESResult > result = std::make_unique< ESResult >(); + AwsHttpResponseToString(response, result->result_json); + + // If response was not valid, set error + if (response->GetResponseCode() != Aws::Http::HttpResponseCode::OK) { + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; + m_error_message = + "Http response code was not OK. 
Code received: " + + std::to_string(static_cast< long >(response->GetResponseCode())) + + "."; + if (response->HasClientError()) + m_error_message += + " Client error: '" + response->GetClientErrorMessage() + "'."; + if (!result->result_json.empty()) { + m_error_message += + " Response error: '" + result->result_json + "'."; + } + LogMsg(ES_ERROR, m_error_message.c_str()); + return list_of_column; + } + + GetJsonSchema(*result); + + rabbit::array schema_array = result->es_result_doc["schema"]; + for (rabbit::array::iterator it = schema_array.begin(); + it != schema_array.end(); ++it) { + std::string column_name = it->at("name").as_string(); + list_of_column.push_back(column_name); + } + + return list_of_column; +} + int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { + m_error_details.reset(); if (!query) { + m_error_type = ConnErrorType::CONN_ERROR_INVALID_NULL_PTR; m_error_message = "Query is NULL"; LogMsg(ES_ERROR, m_error_message.c_str()); return -1; } else if (!m_http_client) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Unable to connect. Please try connecting again."; LogMsg(ES_ERROR, m_error_message.c_str()); return -1; @@ -431,6 +574,7 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { // Validate response if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Failed to receive response from query. " "Received NULL response."; @@ -444,6 +588,7 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { // If response was not valid, set error if (response->GetResponseCode() != Aws::Http::HttpResponseCode::OK) { + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Http response code was not OK. 
Code received: " + std::to_string(static_cast< long >(response->GetResponseCode())) @@ -452,6 +597,7 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { m_error_message += " Client error: '" + response->GetClientErrorMessage() + "'."; if (!result->result_json.empty()) { + m_error_details = ParseErrorResponse(*result.get()); m_error_message += " Response error: '" + result->result_json + "'."; } @@ -463,7 +609,9 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { try { ConstructESResult(*result); } catch (std::runtime_error& e) { - m_error_message = "Received runtime exception: " + std::string(e.what()); + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; + m_error_message = + "Received runtime exception: " + std::string(e.what()); if (!result->result_json.empty()) { m_error_message += " Result body: " + result->result_json; } @@ -481,10 +629,9 @@ int ESCommunication::ExecDirect(const char* query, const char* fetch_size_) { result.release(); if (!cursor.empty()) { - // If the response has a cursor, this thread will retrieve more result pages asynchronously. - std::thread([&, cursor]() { - SendCursorQueries(cursor); - }).detach(); + // If the response has a cursor, this thread will retrieve more result + // pages asynchronously. + std::thread([&, cursor]() { SendCursorQueries(cursor); }).detach(); } return 0; @@ -502,6 +649,7 @@ void ESCommunication::SendCursorQueries(std::string cursor) { SQL_ENDPOINT_FORMAT_JDBC, Aws::Http::HttpMethod::HTTP_POST, ctype, "", "", cursor); if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Failed to receive response from cursor. 
" "Received NULL response."; @@ -509,7 +657,7 @@ void ESCommunication::SendCursorQueries(std::string cursor) { return; } - std::unique_ptr result = std::make_unique(); + std::unique_ptr< ESResult > result = std::make_unique< ESResult >(); AwsHttpResponseToString(response, result->result_json); PrepareCursorResult(*result); @@ -525,10 +673,12 @@ void ESCommunication::SendCursorQueries(std::string cursor) { && !m_result_queue.push(QUEUE_TIMEOUT, result.get())) { } - // Don't release when attempting to push to the queue as it may take multiple tries. + // Don't release when attempting to push to the queue as it may take + // multiple tries. result.release(); } } catch (std::runtime_error& e) { + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Received runtime exception: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); @@ -546,6 +696,7 @@ void ESCommunication::SendCloseCursorRequest(const std::string& cursor) { IssueRequest(SQL_ENDPOINT_CLOSE_CURSOR, Aws::Http::HttpMethod::HTTP_POST, ctype, "", "", cursor); if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_QUERY_SYNTAX; m_error_message = "Failed to receive response from cursor. " "Received NULL response."; @@ -631,6 +782,7 @@ std::string ESCommunication::GetServerVersion() { std::shared_ptr< Aws::Http::HttpResponse > response = IssueRequest("", Aws::Http::HttpMethod::HTTP_GET, "", "", ""); if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Failed to receive response from query. 
" "Received NULL response."; @@ -649,14 +801,17 @@ std::string ESCommunication::GetServerVersion() { } } catch (const rabbit::type_mismatch& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const rabbit::parse_error& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const std::exception& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); @@ -679,6 +834,7 @@ std::string ESCommunication::GetClusterName() { std::shared_ptr< Aws::Http::HttpResponse > response = IssueRequest("", Aws::Http::HttpMethod::HTTP_GET, "", "", ""); if (response == nullptr) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Failed to receive response from query. 
" "Received NULL response."; @@ -697,14 +853,17 @@ std::string ESCommunication::GetClusterName() { } } catch (const rabbit::type_mismatch& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const rabbit::parse_error& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); } catch (const std::exception& e) { + m_error_type = ConnErrorType::CONN_ERROR_COMM_LINK_FAILURE; m_error_message = "Error parsing main endpoint response: " + std::string(e.what()); LogMsg(ES_ERROR, m_error_message.c_str()); diff --git a/sql-odbc/src/odfesqlodbc/es_communication.h b/sql-odbc/src/odfesqlodbc/es_communication.h index 38ca25b1de..ca8f623199 100644 --- a/sql-odbc/src/odfesqlodbc/es_communication.h +++ b/sql-odbc/src/odfesqlodbc/es_communication.h @@ -21,6 +21,7 @@ #include #include #include +#include #include "es_types.h" #include "es_result_queue.h" @@ -50,6 +51,7 @@ class ESCommunication { // Create function for factory std::string GetErrorMessage(); + ConnErrorType GetErrorType(); bool ConnectionOptions(runtime_options& rt_opts, bool use_defaults, int expand_dbname, unsigned int option_count); bool ConnectionOptions2(); @@ -74,6 +76,8 @@ class ESCommunication { std::string& output); void SendCloseCursorRequest(const std::string& cursor); void StopResultRetrieval(); + std::vector< std::string > GetColumnsWithSelectQuery( + const std::string table_name); private: void InitializeConnection(); @@ -82,12 +86,15 @@ class ESCommunication { void ConstructESResult(ESResult& result); void GetJsonSchema(ESResult& es_result); void PrepareCursorResult(ESResult& es_result); + std::shared_ptr< ErrorDetails > ParseErrorResponse(ESResult& es_result); // TODO #35 - Go through and add error messages on exit 
conditions - std::string m_error_message; + std::string m_error_message; const std::vector< std::string > m_supported_client_encodings = {"UTF8"}; ConnStatusType m_status; + ConnErrorType m_error_type; + std::shared_ptr< ErrorDetails > m_error_details; bool m_valid_connection_options; bool m_is_retrieving; ESResultQueue m_result_queue; diff --git a/sql-odbc/src/odfesqlodbc/es_helper.cpp b/sql-odbc/src/odfesqlodbc/es_helper.cpp index cf243137ce..529ebfe958 100644 --- a/sql-odbc/src/odfesqlodbc/es_helper.cpp +++ b/sql-odbc/src/odfesqlodbc/es_helper.cpp @@ -57,6 +57,11 @@ std::string GetErrorMsg(void* es_conn) { : NULL; } +ConnErrorType GetErrorType(void* es_conn) { + return es_conn ? static_cast< ESCommunication* >(es_conn)->GetErrorType() + : ConnErrorType::CONN_ERROR_SUCCESS; +} + std::string GetServerVersion(void* es_conn) { return es_conn ? static_cast< ESCommunication* >(es_conn)->GetServerVersion() @@ -64,9 +69,8 @@ std::string GetServerVersion(void* es_conn) { } std::string GetClusterName(void* es_conn) { - return es_conn - ? static_cast< ESCommunication* >(es_conn)->GetClusterName() - : ""; + return es_conn ? 
static_cast< ESCommunication* >(es_conn)->GetClusterName() + : ""; } void* InitializeESConn() { @@ -111,7 +115,13 @@ void ESClearResult(ESResult* es_result) { } void ESStopRetrieval(void* es_conn) { - static_cast< ESCommunication* >(es_conn)->StopResultRetrieval(); + static_cast< ESCommunication* >(es_conn)->StopResultRetrieval(); +} + +std::vector< std::string > ESGetColumnsWithSelectQuery( + void* es_conn, const std::string table_name) { + return static_cast< ESCommunication* >(es_conn)->GetColumnsWithSelectQuery( + table_name); } // This class provides a cross platform way of entering critical sections diff --git a/sql-odbc/src/odfesqlodbc/es_helper.h b/sql-odbc/src/odfesqlodbc/es_helper.h index 2328b07d7a..9aa05f9302 100644 --- a/sql-odbc/src/odfesqlodbc/es_helper.h +++ b/sql-odbc/src/odfesqlodbc/es_helper.h @@ -30,6 +30,9 @@ void* ESConnectDBParams(runtime_options& rt_opts, int expand_dbname, std::string GetServerVersion(void* es_conn); std::string GetClusterName(void* es_conn); std::string GetErrorMsg(void* es_conn); +ConnErrorType GetErrorType(void* es_conn); +std::vector< std::string > ESGetColumnsWithSelectQuery( + void* es_conn, const std::string table_name); // C Interface extern "C" { diff --git a/sql-odbc/src/odfesqlodbc/es_info.cpp b/sql-odbc/src/odfesqlodbc/es_info.cpp index 4c037f1ba9..648a0d967f 100644 --- a/sql-odbc/src/odfesqlodbc/es_info.cpp +++ b/sql-odbc/src/odfesqlodbc/es_info.cpp @@ -83,6 +83,22 @@ const std::unordered_map< int, std::vector< int > > sql_es_type_map = { {ES_TYPE_KEYWORD, ES_TYPE_TEXT, ES_TYPE_NESTED, ES_TYPE_OBJECT}}, {SQL_TYPE_TIMESTAMP, {ES_TYPE_DATETIME}}}; +const std::unordered_map< std::string, int > data_name_data_type_map = { + {ES_TYPE_NAME_BOOLEAN, SQL_BIT}, + {ES_TYPE_NAME_BYTE, SQL_TINYINT}, + {ES_TYPE_NAME_SHORT, SQL_SMALLINT}, + {ES_TYPE_NAME_INTEGER, SQL_INTEGER}, + {ES_TYPE_NAME_LONG, SQL_BIGINT}, + {ES_TYPE_NAME_HALF_FLOAT, SQL_REAL}, + {ES_TYPE_NAME_FLOAT, SQL_REAL}, + {ES_TYPE_NAME_DOUBLE, SQL_DOUBLE}, + 
{ES_TYPE_NAME_SCALED_FLOAT, SQL_DOUBLE}, + {ES_TYPE_NAME_KEYWORD, SQL_WVARCHAR}, + {ES_TYPE_NAME_TEXT, SQL_WVARCHAR}, + {ES_TYPE_NAME_DATE, SQL_TYPE_TIMESTAMP}, + {ES_TYPE_NAME_OBJECT, SQL_WVARCHAR}, + {ES_TYPE_NAME_NESTED, SQL_WVARCHAR}}; + // Boilerplate code for easy column bind handling class BindTemplate { public: @@ -262,7 +278,7 @@ class BindTemplateSQLCHAR : public BindTemplate { } void UpdateData(SQLPOINTER new_data, size_t size) { m_data.clear(); - SQLCHAR *data = (SQLCHAR *)new_data; + SQLCHAR *data = reinterpret_cast< SQLCHAR * >(new_data); for (size_t i = 0; i < size; i++) { m_data.push_back(*data++); } @@ -306,8 +322,8 @@ void GetCatalogData(const std::string &query, StatementClass *stmt, StatementClass *sub_stmt, const TableResultSet res_type, std::string &table_type, void (*populate_binds)(bind_vector &), - void (*setup_qres_info)(QResultClass *, - EnvironmentClass *)); + void (*setup_qres_info)(QResultClass *, EnvironmentClass *), + std::vector< std::string > *list_of_columns = NULL); // Common function declarations void ConvertToString(std::string &out, bool &valid, const SQLCHAR *sql_char, @@ -394,7 +410,8 @@ void AssignTableBindTemplates(bind_vector &tabs); void SetupTableQResInfo(QResultClass *res, EnvironmentClass *env); void SetTableTuples(QResultClass *res, const TableResultSet res_type, const bind_vector &bind_tbl, std::string &table_type, - StatementClass *stmt, StatementClass *tbl_stmt); + StatementClass *stmt, StatementClass *tbl_stmt, + std::vector< std::string > *list_of_columns = NULL); // Table specific function declarations void split(const std::string &input, const std::string &delim, @@ -457,7 +474,8 @@ void SetupTableQResInfo(QResultClass *res, EnvironmentClass *env) { void SetTableTuples(QResultClass *res, const TableResultSet res_type, const bind_vector &bind_tbl, std::string &table_type, - StatementClass *stmt, StatementClass *tbl_stmt) { + StatementClass *stmt, StatementClass *tbl_stmt, + std::vector< std::string > 
*list_of_columns) { auto CheckResult = [&](const auto &res) { if (res != SQL_NO_DATA_FOUND) { SC_full_error_copy(stmt, tbl_stmt, FALSE); @@ -469,19 +487,58 @@ void SetTableTuples(QResultClass *res, const TableResultSet res_type, }; auto AssignData = [&](auto *res, const auto &binds) { TupleField *tuple = QR_AddNew(res); - for (size_t i = 0; i < binds.size(); i++) - binds[i]->AssignData(&tuple[i]); + // Since we do not support catalogs, we will return an empty string for + // catalog names. This is required for Excel for Mac, which uses this + // information for its Data Preview window. + std::string catalog(""); + bind_tbl[TABLES_CATALOG_NAME]->UpdateData((void *)catalog.c_str(), 0); + + // TODO #630 - Revisit logic of adding tuples for SQLTables & SQLColumns + for (size_t i = 0; i < binds.size(); i++) { + // Add tuples for SQLColumns + if (binds.size() > COLUMNS_SQL_DATA_TYPE) { + // Add data type for data loading issue in Power BI Desktop + auto data_type = data_name_data_type_map + .find(bind_tbl[COLUMNS_TYPE_NAME]->AsString())->second; + if (i == COLUMNS_DATA_TYPE) { + set_tuplefield_int2(&tuple[COLUMNS_DATA_TYPE], + static_cast< short >(data_type)); + } else if (i == COLUMNS_SQL_DATA_TYPE) { + set_tuplefield_int2(&tuple[COLUMNS_SQL_DATA_TYPE], + static_cast< short >(data_type)); + } else { + binds[i]->AssignData(&tuple[i]); + } + } + // Add tuples for SQLTables + else { + binds[i]->AssignData(&tuple[i]); + } + } }; // General case if (res_type == TableResultSet::All) { RETCODE result = SQL_NO_DATA_FOUND; + int ordinal_position = 0; while (SQL_SUCCEEDED(result = ESAPI_Fetch(tbl_stmt))) { if (bind_tbl[TABLES_TABLE_TYPE]->AsString() == "BASE TABLE") { std::string table("TABLE"); - bind_tbl[TABLES_TABLE_TYPE]->UpdateData(&table, table.size()); + bind_tbl[TABLES_TABLE_TYPE]->UpdateData((void *)table.c_str(), + table.length()); + } + if (list_of_columns != NULL && !list_of_columns->empty()) { + if (std::find(list_of_columns->begin(), list_of_columns->end(), + 
bind_tbl[COLUMNS_COLUMN_NAME]->AsString()) + != list_of_columns->end()) { + ordinal_position++; + bind_tbl[COLUMNS_ORDINAL_POSITION]->UpdateData( + &ordinal_position, 0); + AssignData(res, bind_tbl); + } + } else { + AssignData(res, bind_tbl); } - AssignData(res, bind_tbl); } CheckResult(result); } else if (res_type == TableResultSet::TableLookUp) { @@ -498,7 +555,8 @@ void SetTableTuples(QResultClass *res, const TableResultSet res_type, // Replace BASE TABLE with TABLE for Excel & Power BI SQLTables call if (bind_tbl[TABLES_TABLE_TYPE]->AsString() == "BASE TABLE") { std::string table("TABLE"); - bind_tbl[TABLES_TABLE_TYPE]->UpdateData(&table, table.size()); + bind_tbl[TABLES_TABLE_TYPE]->UpdateData((void *)table.c_str(), + table.length()); } if (std::find(table_types.begin(), table_types.end(), bind_tbl[TABLES_TABLE_TYPE]->AsString()) @@ -524,12 +582,6 @@ void SetTableTuples(QResultClass *res, const TableResultSet res_type, // Get index of result type of interest size_t idx = NUM_OF_TABLES_FIELDS; switch (res_type) { - case TableResultSet::Catalog: - idx = TABLES_CATALOG_NAME; - break; - case TableResultSet::Schema: - idx = TABLES_SCHEMA_NAME; - break; case TableResultSet::TableTypes: idx = TABLES_TABLE_TYPE; break; @@ -639,8 +691,8 @@ void GetCatalogData(const std::string &query, StatementClass *stmt, StatementClass *sub_stmt, const TableResultSet res_type, std::string &table_type, void (*populate_binds)(bind_vector &), - void (*setup_qres_info)(QResultClass *, - EnvironmentClass *)) { + void (*setup_qres_info)(QResultClass *, EnvironmentClass *), + std::vector< std::string > *list_of_columns) { // Execute query ExecuteQuery(SC_get_conn(stmt), reinterpret_cast< HSTMT * >(&sub_stmt), query); @@ -656,8 +708,8 @@ void GetCatalogData(const std::string &query, StatementClass *stmt, // Setup QResultClass (*setup_qres_info)( res, static_cast< EnvironmentClass * >(CC_get_env(SC_get_conn(stmt)))); - SetTableTuples(res, res_type, binds, table_type, stmt, sub_stmt); - + 
SetTableTuples(res, res_type, binds, table_type, stmt, sub_stmt, + list_of_columns); CleanUp(stmt, sub_stmt, SQL_SUCCESS); } @@ -692,20 +744,32 @@ ESAPI_Tables(HSTMT hstmt, const SQLCHAR *catalog_name_sql, if (catalog_name == SQL_ALL_CATALOGS) { if (schema_valid && table_valid && (table_name == "") - && (schema_name == "")) - result_type = TableResultSet::Catalog; - } + && (schema_name == "")) { + std::string error_msg("Catalogs not supported."); + SC_set_error(stmt, STMT_NOT_IMPLEMENTED_ERROR, + error_msg.c_str(), func); + CleanUp(stmt, tbl_stmt); + return SQL_ERROR; + } + // result_type = TableResultSet::Catalog; + } if (schema_name == SQL_ALL_SCHEMAS) { if (catalog_valid && table_valid && (table_name == "") - && (catalog_name == "")) - result_type = TableResultSet::Schema; - } + && (catalog_name == "")) { + std::string error_msg("Schemas not supported."); + SC_set_error(stmt, STMT_NOT_IMPLEMENTED_ERROR, + error_msg.c_str(), func); + CleanUp(stmt, tbl_stmt); + return SQL_ERROR; + } + // result_type = TableResultSet::Schema; + } if (table_type_valid && (table_type == SQL_ALL_TABLE_TYPES)) { if (catalog_valid && table_valid && schema_valid && (table_name == "") && (catalog_name == "") && (schema_name == "")) result_type = TableResultSet::TableTypes; - } + } if (table_type_valid && (table_type != SQL_ALL_TABLE_TYPES)) { result_type = TableResultSet::TableLookUp; } @@ -771,12 +835,22 @@ ESAPI_Columns(HSTMT hstmt, const SQLCHAR *catalog_name_sql, GenerateColumnQuery(query, table_name, column_name, table_valid, column_valid, flag); + // Get list of columns with SELECT * query since columns doesn't match + // with DESCRIBE & SELECT * query + std::vector< std::string > list_of_columns; + if (table_valid) { + ConnectionClass *conn = SC_get_conn(stmt); + list_of_columns = + ESGetColumnsWithSelectQuery(conn->esconn, table_name); + } + // TODO #324 (SQL Plugin)- evaluate catalog & schema support // Execute query std::string table_type = ""; GetCatalogData(query, stmt, 
col_stmt, TableResultSet::All, table_type, - AssignColumnBindTemplates, SetupColumnQResInfo); + AssignColumnBindTemplates, SetupColumnQResInfo, + &list_of_columns); return SQL_SUCCESS; } catch (std::bad_alloc &e) { std::string error_msg = std::string("Bad allocation exception: '") diff --git a/sql-odbc/src/odfesqlodbc/es_parse_result.cpp b/sql-odbc/src/odfesqlodbc/es_parse_result.cpp index b73e291751..e111bb37f3 100644 --- a/sql-odbc/src/odfesqlodbc/es_parse_result.cpp +++ b/sql-odbc/src/odfesqlodbc/es_parse_result.cpp @@ -69,21 +69,21 @@ static const std::string JSON_KW_CURSOR = "cursor"; // clang-format on const std::unordered_map< std::string, OID > type_to_oid_map = { - {"boolean", ES_TYPE_BOOL}, - {"byte", ES_TYPE_INT2}, - {"short", ES_TYPE_INT2}, - {"integer", ES_TYPE_INT4}, - {"long", ES_TYPE_INT8}, - {"half_float", ES_TYPE_FLOAT4}, - {"float", ES_TYPE_FLOAT4}, - {"double", ES_TYPE_FLOAT8}, - {"scaled_float", ES_TYPE_FLOAT8}, - {"keyword", ES_TYPE_VARCHAR}, - {"text", ES_TYPE_VARCHAR}, - {"date", ES_TYPE_TIMESTAMP}, - {"object", ES_TYPE_VARCHAR}, - {"nested", ES_TYPE_VARCHAR}, - {"date", ES_TYPE_DATE}}; + {ES_TYPE_NAME_BOOLEAN, ES_TYPE_BOOL}, + {ES_TYPE_NAME_BYTE, ES_TYPE_INT2}, + {ES_TYPE_NAME_SHORT, ES_TYPE_INT2}, + {ES_TYPE_NAME_INTEGER, ES_TYPE_INT4}, + {ES_TYPE_NAME_LONG, ES_TYPE_INT8}, + {ES_TYPE_NAME_HALF_FLOAT, ES_TYPE_FLOAT4}, + {ES_TYPE_NAME_FLOAT, ES_TYPE_FLOAT4}, + {ES_TYPE_NAME_DOUBLE, ES_TYPE_FLOAT8}, + {ES_TYPE_NAME_SCALED_FLOAT, ES_TYPE_FLOAT8}, + {ES_TYPE_NAME_KEYWORD, ES_TYPE_VARCHAR}, + {ES_TYPE_NAME_TEXT, ES_TYPE_VARCHAR}, + {ES_TYPE_NAME_DATE, ES_TYPE_TIMESTAMP}, + {ES_TYPE_NAME_OBJECT, ES_TYPE_VARCHAR}, + {ES_TYPE_NAME_VARCHAR, ES_TYPE_VARCHAR}, + {ES_TYPE_NAME_DATE, ES_TYPE_DATE}}; #define ES_VARCHAR_SIZE (-2) const std::unordered_map< OID, int16_t > oid_to_size_map = { diff --git a/sql-odbc/src/odfesqlodbc/es_statement.cpp b/sql-odbc/src/odfesqlodbc/es_statement.cpp index fccbfca2af..afedd5e066 100644 --- 
a/sql-odbc/src/odfesqlodbc/es_statement.cpp +++ b/sql-odbc/src/odfesqlodbc/es_statement.cpp @@ -68,16 +68,22 @@ RETCODE ExecuteStatement(StatementClass *stmt, BOOL commit) { QResultClass *res = SendQueryGetResult(stmt, commit); if (!res) { std::string es_conn_err = GetErrorMsg(SC_get_conn(stmt)->esconn); + ConnErrorType es_err_type = GetErrorType(SC_get_conn(stmt)->esconn); std::string es_parse_err = GetResultParserError(); if (!es_conn_err.empty()) { - SC_set_error(stmt, STMT_NO_RESPONSE, es_conn_err.c_str(), func); + if (es_err_type == ConnErrorType::CONN_ERROR_QUERY_SYNTAX) { + SC_set_error(stmt, STMT_QUERY_SYNTAX_ERROR, es_conn_err.c_str(), + func); + } else { + SC_set_error(stmt, STMT_NO_RESPONSE, es_conn_err.c_str(), func); + } } else if (!es_parse_err.empty()) { SC_set_error(stmt, STMT_EXEC_ERROR, es_parse_err.c_str(), func); } else if (SC_get_errornumber(stmt) <= 0) { - SC_set_error( - stmt, STMT_NO_RESPONSE, - "Failed to retrieve error message from result. Connection may be down.", - func); + SC_set_error(stmt, STMT_NO_RESPONSE, + "Failed to retrieve error message from result. 
" + "Connection may be down.", + func); } return CleanUp(); } @@ -160,8 +166,8 @@ SQLRETURN GetNextResultSet(StatementClass *stmt) { } SQLSMALLINT total_columns = -1; - if (!SQL_SUCCEEDED(SQLNumResultCols(stmt, &total_columns)) || - (total_columns == -1)) { + if (!SQL_SUCCEEDED(SQLNumResultCols(stmt, &total_columns)) + || (total_columns == -1)) { return SQL_ERROR; } @@ -175,7 +181,7 @@ SQLRETURN GetNextResultSet(StatementClass *stmt) { QR_set_server_cursor_id(q_res, NULL); } - // Responsible for looping through rows, allocating tuples and + // Responsible for looping through rows, allocating tuples and // appending these rows in q_result CC_Append_Table_Data(es_res->es_result_doc, q_res, total_columns, *(q_res->fields)); @@ -239,7 +245,8 @@ QResultClass *SendQueryGetResult(StatementClass *stmt, BOOL commit) { // Send command ConnectionClass *conn = SC_get_conn(stmt); - if (ESExecDirect(conn->esconn, stmt->statement, conn->connInfo.fetch_size) != 0) { + if (ESExecDirect(conn->esconn, stmt->statement, conn->connInfo.fetch_size) + != 0) { QR_Destructor(res); return NULL; } diff --git a/sql-odbc/src/odfesqlodbc/es_types.h b/sql-odbc/src/odfesqlodbc/es_types.h index 596534017d..dcb73398c2 100644 --- a/sql-odbc/src/odfesqlodbc/es_types.h +++ b/sql-odbc/src/odfesqlodbc/es_types.h @@ -30,22 +30,22 @@ extern "C" { #define ES_TYPE_LO ? ? ? ? 
/* waiting for permanent type */ #endif -#define ES_TYPE_NAME_BOOLEAN "boolean"; -#define ES_TYPE_NAME_BYTE "byte"; -#define ES_TYPE_NAME_SHORT "short"; -#define ES_TYPE_NAME_INTEGER "integer"; -#define ES_TYPE_NAME_LONG "long"; -#define ES_TYPE_NAME_HALF_FLOAT "half_float"; -#define ES_TYPE_NAME_FLOAT "float"; -#define ES_TYPE_NAME_DOUBLE "double"; -#define ES_TYPE_NAME_SCALED_FLOAT "scaled_float"; -#define ES_TYPE_NAME_KEYWORD "keyword"; -#define ES_TYPE_NAME_TEXT "text"; -#define ES_TYPE_NAME_NESTED "nested"; -#define ES_TYPE_NAME_DATE "date"; -#define ES_TYPE_NAME_OBJECT "object"; -#define ES_TYPE_NAME_VARCHAR "varchar"; -#define ES_TYPE_NAME_UNSUPPORTED "unsupported"; +#define ES_TYPE_NAME_BOOLEAN "boolean" +#define ES_TYPE_NAME_BYTE "byte" +#define ES_TYPE_NAME_SHORT "short" +#define ES_TYPE_NAME_INTEGER "integer" +#define ES_TYPE_NAME_LONG "long" +#define ES_TYPE_NAME_HALF_FLOAT "half_float" +#define ES_TYPE_NAME_FLOAT "float" +#define ES_TYPE_NAME_DOUBLE "double" +#define ES_TYPE_NAME_SCALED_FLOAT "scaled_float" +#define ES_TYPE_NAME_KEYWORD "keyword" +#define ES_TYPE_NAME_TEXT "text" +#define ES_TYPE_NAME_NESTED "nested" +#define ES_TYPE_NAME_DATE "date" +#define ES_TYPE_NAME_OBJECT "object" +#define ES_TYPE_NAME_VARCHAR "varchar" +#define ES_TYPE_NAME_UNSUPPORTED "unsupported" #define MS_ACCESS_SERIAL "int identity" #define ES_TYPE_BOOL 16 @@ -239,6 +239,15 @@ typedef enum { CONNECTION_GSS_STARTUP /* Negotiating GSSAPI. 
*/ } ConnStatusType; +typedef enum { + CONN_ERROR_SUCCESS, // 0 + CONN_ERROR_QUERY_SYNTAX, // 42000 + CONN_ERROR_COMM_LINK_FAILURE, // 08S01 + CONN_ERROR_INVALID_NULL_PTR, // HY009 + CONN_ERROR_INVALID_AUTH, // 28000 + CONN_ERROR_UNABLE_TO_ESTABLISH // 08001 +} ConnErrorType; + // Only expose this to C++ code, this will be passed through the C interface as // a void* #ifdef __cplusplus @@ -285,6 +294,13 @@ typedef struct runtime_options { encryption_options crypt; } runtime_options; +typedef struct ErrorDetails { + std::string reason; + std::string details; + std::string source_type; + ConnErrorType type; +} ErrorDetails; + #define INVALID_OID 0 #define KEYWORD_TYPE_OID 1043 #define KEYWORD_TYPE_SIZE 255 diff --git a/sql-odbc/src/odfesqlodbc/info.c b/sql-odbc/src/odfesqlodbc/info.c index efae0d7cbf..a8925d4087 100644 --- a/sql-odbc/src/odfesqlodbc/info.c +++ b/sql-odbc/src/odfesqlodbc/info.c @@ -331,14 +331,6 @@ RETCODE SQL_API ESAPI_GetInfo(HDBC hdbc, SQLUSMALLINT fInfoType, case SQL_MAX_OWNER_NAME_LEN: /* ODBC 1.0 */ len = 2; value = 0; - if (ES_VERSION_GT(conn, 7.4)) - value = CC_get_max_idlen(conn); -#ifdef MAX_SCHEMA_LEN - else - value = MAX_SCHEMA_LEN; -#endif /* MAX_SCHEMA_LEN */ - if (0 == value) - value = NAMEDATALEN_V73 - 1; break; case SQL_MAX_PROCEDURE_NAME_LEN: /* ODBC 1.0 */ @@ -487,20 +479,20 @@ RETCODE SQL_API ESAPI_GetInfo(HDBC hdbc, SQLUSMALLINT fInfoType, case SQL_QUALIFIER_LOCATION: /* ODBC 2.0 */ len = 2; - value = SQL_QL_START; + value = 0; break; case SQL_QUALIFIER_NAME_SEPARATOR: /* ODBC 1.0 */ - p = "."; + p = ""; break; case SQL_QUALIFIER_TERM: /* ODBC 1.0 */ - p = "cluster"; + p = ""; break; case SQL_QUALIFIER_USAGE: /* ODBC 2.0 */ len = 4; - value = SQL_CU_DML_STATEMENTS; + value = 0; break; case SQL_QUOTED_IDENTIFIER_CASE: /* ODBC 2.0 */ @@ -672,10 +664,7 @@ RETCODE SQL_API ESAPI_GetInfo(HDBC hdbc, SQLUSMALLINT fInfoType, value = SQL_BS_SELECT_EXPLICIT | SQL_BS_ROW_COUNT_EXPLICIT; break; case SQL_CATALOG_NAME: - if (CurrCat(conn)) - p = 
"Y"; - else - p = "N"; + p = "N"; break; case SQL_COLLATION_SEQ: p = ""; diff --git a/sql-odbc/src/odfesqlodbc/mylog.c b/sql-odbc/src/odfesqlodbc/mylog.c index 1e50b7b140..ebd93bf81e 100644 --- a/sql-odbc/src/odfesqlodbc/mylog.c +++ b/sql-odbc/src/odfesqlodbc/mylog.c @@ -248,6 +248,8 @@ static void MLOG_open() { char filebuf[80], errbuf[160]; BOOL open_error = FALSE; + // TODO (#585): Add option to log to stderr stream + // MLOGFP = stderr; if (MLOGFP) return; diff --git a/sql-odbc/src/odfesqlodbc/odbcapi.c b/sql-odbc/src/odfesqlodbc/odbcapi.c index a2d1490411..cdbf714719 100644 --- a/sql-odbc/src/odfesqlodbc/odbcapi.c +++ b/sql-odbc/src/odfesqlodbc/odbcapi.c @@ -248,13 +248,13 @@ RETCODE SQL_API SQLDisconnect(HDBC ConnectionHandle) { #ifndef UNICODE_SUPPORTXX RETCODE SQL_API SQLExecDirect(HSTMT StatementHandle, SQLCHAR *StatementText, SQLINTEGER TextLength) { - if(StatementHandle == NULL) + if (StatementHandle == NULL) return SQL_ERROR; StatementClass *stmt = (StatementClass *)StatementHandle; if (SC_connection_lost_check(stmt, __FUNCTION__)) return SQL_ERROR; - + // Enter critical ENTER_STMT_CS(stmt); @@ -274,7 +274,7 @@ RETCODE SQL_API SQLExecDirect(HSTMT StatementHandle, SQLCHAR *StatementText, #endif /* UNICODE_SUPPORTXX */ RETCODE SQL_API SQLExecute(HSTMT StatementHandle) { - if(StatementHandle == NULL) + if (StatementHandle == NULL) return SQL_ERROR; StatementClass *stmt = (StatementClass *)StatementHandle; @@ -468,7 +468,7 @@ RETCODE SQL_API SQLParamData(HSTMT StatementHandle, PTR *Value) { #ifndef UNICODE_SUPPORTXX RETCODE SQL_API SQLPrepare(HSTMT StatementHandle, SQLCHAR *StatementText, SQLINTEGER TextLength) { - if(StatementHandle == NULL) + if (StatementHandle == NULL) return SQL_ERROR; CSTR func = "SQLPrepare"; @@ -1291,3 +1291,109 @@ RETCODE SQL_API SQLBindParameter(HSTMT hstmt, SQLUSMALLINT ipar, "SQLBindParameter"); return SQL_ERROR; } + +/* ODBC 2.x-specific functions */ +// TODO (#590): Add implementations for remaining ODBC 2.x function + 
+RETCODE SQL_API SQLAllocStmt(SQLHDBC InputHandle, SQLHSTMT *OutputHandle) { + RETCODE ret; + ConnectionClass *conn; + MYLOG(ES_TRACE, "entering\n"); + + conn = (ConnectionClass *)InputHandle; + ENTER_CONN_CS(conn); + ret = ESAPI_AllocStmt( + InputHandle, OutputHandle, + PODBC_EXTERNAL_STATEMENT | PODBC_INHERIT_CONNECT_OPTIONS); + if (*OutputHandle) + ((StatementClass *)(*OutputHandle))->external = 1; + LEAVE_CONN_CS(conn); + + return ret; +} + +#ifndef UNICODE_SUPPORTXX +RETCODE SQL_API SQLGetConnectOption(HDBC ConnectionHandle, SQLUSMALLINT Option, + PTR Value) { + RETCODE ret; + + MYLOG(ES_TRACE, "entering " FORMAT_UINTEGER "\n", Option); + ENTER_CONN_CS((ConnectionClass *)ConnectionHandle); + CC_clear_error((ConnectionClass *)ConnectionHandle); + ret = ESAPI_GetConnectOption(ConnectionHandle, Option, Value, NULL, 0); + LEAVE_CONN_CS((ConnectionClass *)ConnectionHandle); + return ret; +} + +/* SQLSetConnectOption -> SQLSetConnectAttr */ +RETCODE SQL_API SQLSetConnectOption(HDBC ConnectionHandle, SQLUSMALLINT Option, + SQLULEN Value) { + RETCODE ret; + ConnectionClass *conn = (ConnectionClass *)ConnectionHandle; + + MYLOG(ES_TRACE, "entering " FORMAT_INTEGER "\n", Option); + ENTER_CONN_CS(conn); + CC_clear_error(conn); + ret = ESAPI_SetConnectOption(ConnectionHandle, Option, Value); + LEAVE_CONN_CS(conn); + return ret; +} + +/* SQLColAttributes -> SQLColAttribute */ +SQLRETURN SQL_API SQLColAttributes(SQLHSTMT StatementHandle, + SQLUSMALLINT ColumnNumber, + SQLUSMALLINT FieldIdentifier, + SQLPOINTER CharacterAttribute, + SQLSMALLINT BufferLength, + SQLSMALLINT *StringLength, +#if defined(_WIN64) || defined(_WIN32) || defined(SQLCOLATTRIBUTE_SQLLEN) + SQLLEN *NumericAttribute +#else + SQLPOINTER NumericAttribute +#endif +) { + RETCODE ret; + StatementClass *stmt = (StatementClass *)StatementHandle; + + MYLOG(ES_TRACE, "entering\n"); + if (SC_connection_lost_check(stmt, __FUNCTION__)) + return SQL_ERROR; + + ENTER_STMT_CS(stmt); + SC_clear_error(stmt); + ret = 
ESAPI_ColAttributes(StatementHandle, ColumnNumber, FieldIdentifier, + CharacterAttribute, BufferLength, StringLength, + NumericAttribute); + LEAVE_STMT_CS(stmt); + return ret; +} + +/* SQLError -> SQLDiagRec */ +RETCODE SQL_API SQLError(SQLHENV EnvironmentHandle, SQLHDBC ConnectionHandle, + SQLHSTMT StatementHandle, SQLCHAR *Sqlstate, + SQLINTEGER *NativeError, SQLCHAR *MessageText, + SQLSMALLINT BufferLength, SQLSMALLINT *TextLength) { + RETCODE ret; + SQLSMALLINT RecNumber = 1; + + MYLOG(ES_TRACE, "entering\n"); + + if (StatementHandle) { + ret = + ESAPI_StmtError(StatementHandle, RecNumber, Sqlstate, NativeError, + MessageText, BufferLength, TextLength, 0); + } else if (ConnectionHandle) { + ret = ESAPI_ConnectError(ConnectionHandle, RecNumber, Sqlstate, + NativeError, MessageText, BufferLength, + TextLength, 0); + } else if (EnvironmentHandle) { + ret = ESAPI_EnvError(EnvironmentHandle, RecNumber, Sqlstate, NativeError, + MessageText, BufferLength, TextLength, 0); + } else { + ret = SQL_ERROR; + } + + MYLOG(ES_TRACE, "leaving %d\n", ret); + return ret; +} +#endif /* UNICODE_SUPPORTXX */ diff --git a/sql-odbc/src/odfesqlodbc/odbcapiw.c b/sql-odbc/src/odfesqlodbc/odbcapiw.c index 5a7a8ab92d..9a7e5d66db 100644 --- a/sql-odbc/src/odfesqlodbc/odbcapiw.c +++ b/sql-odbc/src/odfesqlodbc/odbcapiw.c @@ -151,8 +151,8 @@ RETCODE SQL_API SQLDriverConnectW(HDBC hdbc, HWND hwnd, SQLWCHAR *szConnStrIn, utf8_to_ucs2(szOut, maxlen, szConnStrOut, cbConnStrOutMax); if (outlen >= cbConnStrOutMax && NULL != szConnStrOut && NULL != pcbConnStrOut) { - MYLOG(ES_ALL, "cbConnstrOutMax=%d pcb=%p\n", - cbConnStrOutMax, pcbConnStrOut); + MYLOG(ES_ALL, "cbConnstrOutMax=%d pcb=%p\n", cbConnStrOutMax, + pcbConnStrOut); if (SQL_SUCCESS == ret) { CC_set_error(conn, CONN_TRUNCATED, "the ConnStrOut is too small", func); @@ -281,7 +281,7 @@ RETCODE SQL_API SQLDescribeColW(HSTMT StatementHandle, RETCODE SQL_API SQLExecDirectW(HSTMT StatementHandle, SQLWCHAR *StatementText, SQLINTEGER TextLength) { - 
if(StatementHandle == NULL) + if (StatementHandle == NULL) return SQL_ERROR; StatementClass *stmt = (StatementClass *)StatementHandle; @@ -301,7 +301,8 @@ RETCODE SQL_API SQLExecDirectW(HSTMT StatementHandle, SQLWCHAR *StatementText, // Execute statement if statement is ready RETCODE ret = SQL_ERROR; if (!SC_opencheck(stmt, "SQLExecDirectW")) - ret = ESAPI_ExecDirect(StatementHandle, (const SQLCHAR *)stxt, (SQLINTEGER)slen, 1); + ret = ESAPI_ExecDirect(StatementHandle, (const SQLCHAR *)stxt, + (SQLINTEGER)slen, 1); // Exit critical LEAVE_STMT_CS(stmt); @@ -378,7 +379,7 @@ RETCODE SQL_API SQLGetInfoW(HDBC ConnectionHandle, SQLUSMALLINT InfoType, RETCODE SQL_API SQLPrepareW(HSTMT StatementHandle, SQLWCHAR *StatementText, SQLINTEGER TextLength) { - if(StatementHandle == NULL) + if (StatementHandle == NULL) return SQL_ERROR; CSTR func = "SQLPrepareW"; @@ -400,7 +401,8 @@ RETCODE SQL_API SQLPrepareW(HSTMT StatementHandle, SQLWCHAR *StatementText, // Prepare statement if statement is ready RETCODE ret = SQL_ERROR; if (!SC_opencheck(stmt, func)) - ret = ESAPI_Prepare(StatementHandle, (const SQLCHAR *)stxt, (SQLINTEGER)slen); + ret = ESAPI_Prepare(StatementHandle, (const SQLCHAR *)stxt, + (SQLINTEGER)slen); // Exit critical LEAVE_STMT_CS(stmt); @@ -897,3 +899,171 @@ RETCODE SQL_API SQLGetTypeInfoW(SQLHSTMT StatementHandle, LEAVE_STMT_CS(stmt); return ret; } + +/* ODBC 2.x-specific functions */ +// TODO (#590): Add implementations for remaining ODBC 2.x function + +SQLRETURN SQL_API SQLColAttributesW(SQLHSTMT hstmt, SQLUSMALLINT iCol, + SQLUSMALLINT iField, SQLPOINTER pCharAttr, + SQLSMALLINT cbCharAttrMax, + SQLSMALLINT *pcbCharAttr, +#if defined(_WIN64) || defined(_WIN32) || defined(SQLCOLATTRIBUTE_SQLLEN) + SQLLEN *pNumAttr +#else + SQLPOINTER pNumAttr +#endif +) { + CSTR func = "SQLColAttributeW"; + RETCODE ret; + StatementClass *stmt = (StatementClass *)hstmt; + SQLSMALLINT *rgbL, blen = 0, bMax; + char *rgbD = NULL, *rgbDt; + + MYLOG(ES_TRACE, "entering\n"); + if 
(SC_connection_lost_check(stmt, __FUNCTION__)) + return SQL_ERROR; + + ENTER_STMT_CS(stmt); + SC_clear_error(stmt); + switch (iField) { + case SQL_DESC_BASE_COLUMN_NAME: + case SQL_DESC_BASE_TABLE_NAME: + case SQL_DESC_CATALOG_NAME: + case SQL_DESC_LABEL: + case SQL_DESC_LITERAL_PREFIX: + case SQL_DESC_LITERAL_SUFFIX: + case SQL_DESC_LOCAL_TYPE_NAME: + case SQL_DESC_NAME: + case SQL_DESC_SCHEMA_NAME: + case SQL_DESC_TABLE_NAME: + case SQL_DESC_TYPE_NAME: + case SQL_COLUMN_NAME: + bMax = cbCharAttrMax * 3 / WCLEN; + rgbD = malloc(bMax); + rgbL = &blen; + for (rgbDt = rgbD;; bMax = blen + 1, rgbDt = realloc(rgbD, bMax)) { + if (!rgbDt) { + ret = SQL_ERROR; + break; + } + rgbD = rgbDt; + ret = ESAPI_ColAttributes(hstmt, iCol, iField, rgbD, bMax, rgbL, + pNumAttr); + if (SQL_SUCCESS_WITH_INFO != ret || blen < bMax) + break; + } + if (SQL_SUCCEEDED(ret)) { + blen = (SQLSMALLINT)utf8_to_ucs2( + rgbD, blen, (SQLWCHAR *)pCharAttr, cbCharAttrMax / WCLEN); + if (SQL_SUCCESS == ret + && blen * WCLEN >= (unsigned long)cbCharAttrMax) { + ret = SQL_SUCCESS_WITH_INFO; + SC_set_error(stmt, STMT_TRUNCATED, + "The buffer was too small for the pCharAttr.", + func); + } + if (pcbCharAttr) + *pcbCharAttr = blen * WCLEN; + } + if (rgbD) + free(rgbD); + break; + default: + rgbD = pCharAttr; + bMax = cbCharAttrMax; + rgbL = pcbCharAttr; + ret = ESAPI_ColAttributes(hstmt, iCol, iField, rgbD, bMax, rgbL, + pNumAttr); + break; + } + LEAVE_STMT_CS(stmt); + + return ret; +} + +RETCODE SQL_API SQLGetConnectOptionW(HDBC ConnectionHandle, SQLUSMALLINT Option, + PTR Value) { + ConnectionClass *conn = (ConnectionClass *)ConnectionHandle; + RETCODE ret; + + ENTER_CONN_CS(conn); + CC_clear_error(conn); + MYLOG(ES_TRACE, "entering " FORMAT_UINTEGER "\n", Option); + ret = ESAPI_GetConnectOption(ConnectionHandle, Option, Value, NULL, 0); + LEAVE_CONN_CS(conn); + return ret; +} + +RETCODE SQL_API SQLSetConnectOptionW(HDBC ConnectionHandle, SQLUSMALLINT Option, + SQLULEN Value) { + ConnectionClass *conn = 
(ConnectionClass *)ConnectionHandle; + RETCODE ret; + + MYLOG(ES_TRACE, "entering " FORMAT_INTEGER "\n", Option); + ENTER_CONN_CS(conn); + CC_clear_error(conn); + ret = ESAPI_SetConnectOption(ConnectionHandle, Option, Value); + LEAVE_CONN_CS(conn); + return ret; +} + +RETCODE SQL_API SQLErrorW(SQLHENV EnvironmentHandle, SQLHDBC ConnectionHandle, + SQLHSTMT StatementHandle, SQLWCHAR *Sqlstate, + SQLINTEGER *NativeError, SQLWCHAR *MessageText, + SQLSMALLINT BufferLength, SQLSMALLINT *TextLength) { + RETCODE ret; + SQLSMALLINT buflen; + SQLSMALLINT tlen = 0; + SQLSMALLINT RecNumber = 1; + char qstr_ansi[8], *mtxt = NULL; + + MYLOG(ES_TRACE, "entering\n"); + buflen = 0; + if (MessageText && BufferLength > 0) { + buflen = BufferLength; + mtxt = malloc(buflen); + } + + if (StatementHandle) { + ret = ESAPI_StmtError(StatementHandle, RecNumber, (SQLCHAR *)qstr_ansi, + NativeError, (SQLCHAR *)mtxt, buflen, &tlen, 0); + } else if (ConnectionHandle) { + ret = ESAPI_ConnectError(ConnectionHandle, RecNumber, + (SQLCHAR *)qstr_ansi, NativeError, + (SQLCHAR *)mtxt, buflen, &tlen, 0); + } else if (EnvironmentHandle) { + ret = ESAPI_EnvError(EnvironmentHandle, RecNumber, (SQLCHAR *)qstr_ansi, + NativeError, (SQLCHAR *)mtxt, buflen, &tlen, 0); + } else { + ret = SQL_ERROR; + } + + if (SQL_SUCCEEDED(ret)) { + if (Sqlstate) + utf8_to_ucs2(qstr_ansi, -1, Sqlstate, 6); + if (mtxt && tlen <= BufferLength) { + // TODO (#612): Verify wide character conversion + SQLULEN ulen = utf8_to_ucs2_lf(mtxt, tlen, FALSE, MessageText, + BufferLength, TRUE); + if (ulen == (SQLULEN)-1) + tlen = (SQLSMALLINT)locale_to_sqlwchar( + (SQLWCHAR *)MessageText, mtxt, BufferLength, FALSE); + else + tlen = (SQLSMALLINT)ulen; + if (tlen >= BufferLength) + ret = SQL_SUCCESS_WITH_INFO; + else if (tlen < 0) { + char errc[32]; + + SPRINTF_FIXED(errc, "Error: SqlState=%s", qstr_ansi); + tlen = (SQLSMALLINT)utf8_to_ucs2(errc, -1, MessageText, + BufferLength); + } + } + if (TextLength) + *TextLength = tlen; + } + if 
(mtxt) + free(mtxt); + return ret; +} diff --git a/sql-odbc/src/odfesqlodbc/statement.c b/sql-odbc/src/odfesqlodbc/statement.c index af5edf49fc..9e6e8e6334 100644 --- a/sql-odbc/src/odfesqlodbc/statement.c +++ b/sql-odbc/src/odfesqlodbc/statement.c @@ -767,6 +767,7 @@ static const struct { {STMT_RESTRICTED_DATA_TYPE_ERROR, "07006", "07006"}, {STMT_INVALID_CURSOR_STATE_ERROR, "07005", "24000"}, {STMT_CREATE_TABLE_ERROR, "42S01", "S0001"}, /* table already exists */ + {STMT_QUERY_SYNTAX_ERROR, "42000", "42000"}, /* query syntax error */ {STMT_NO_CURSOR_NAME, "S1015", "S1015"}, {STMT_INVALID_CURSOR_NAME, "34000", "34000"}, {STMT_INVALID_ARGUMENT_NO, "HY024", "S1009"}, /* invalid argument value */ diff --git a/sql-odbc/src/odfesqlodbc/statement.h b/sql-odbc/src/odfesqlodbc/statement.h index 5c8b6160e6..1654cc4031 100644 --- a/sql-odbc/src/odfesqlodbc/statement.h +++ b/sql-odbc/src/odfesqlodbc/statement.h @@ -94,6 +94,7 @@ enum { STMT_RESTRICTED_DATA_TYPE_ERROR, STMT_INVALID_CURSOR_STATE_ERROR, STMT_CREATE_TABLE_ERROR, + STMT_QUERY_SYNTAX_ERROR, STMT_NO_CURSOR_NAME, STMT_INVALID_CURSOR_NAME, STMT_INVALID_ARGUMENT_NO, @@ -254,8 +255,8 @@ struct StatementClass_ { Int2 num_key_fields; Int2 statement_type; /* According to the defines above */ Int2 num_params; - Int2 data_at_exec; /* Number of params needing SQLPutData */ - UDWORD iflag; /* ESAPI_AllocStmt parameter */ + Int2 data_at_exec; /* Number of params needing SQLPutData */ + UDWORD iflag; /* ESAPI_AllocStmt parameter */ PutDataInfo pdata_info; po_ind_t parse_status; po_ind_t proc_return; @@ -269,12 +270,12 @@ struct StatementClass_ { po_ind_t multi_statement; /* -1:unknown 0:single 1:multi */ po_ind_t rb_or_tc; /* rollback on error */ po_ind_t - discard_output_params; /* discard output parameters on parse stage */ - po_ind_t cancel_info; /* cancel information */ - po_ind_t ref_CC_error; /* refer to CC_error ? */ - po_ind_t lock_CC_for_rb; /* lock CC for statement rollback ? */ - po_ind_t join_info; /* have joins ? 
*/ - po_ind_t parse_method; /* parse_statement is forced or ? */ + discard_output_params; /* discard output parameters on parse stage */ + po_ind_t cancel_info; /* cancel information */ + po_ind_t ref_CC_error; /* refer to CC_error ? */ + po_ind_t lock_CC_for_rb; /* lock CC for statement rollback ? */ + po_ind_t join_info; /* have joins ? */ + po_ind_t parse_method; /* parse_statement is forced or ? */ esNAME cursor_name; char *plan_name; unsigned char miscinfo; @@ -403,11 +404,7 @@ enum { }; /* prepared state */ -enum { - NOT_PREPARED = 0, - PREPARED, - EXECUTED -}; +enum { NOT_PREPARED = 0, PREPARED, EXECUTED }; /* misc info */ #define SC_set_fetchcursor(a) ((a)->miscinfo |= (1L << 1)) diff --git a/sql-workbench/CONTRIBUTING.md b/sql-workbench/CONTRIBUTING.md index 841ab0a8a7..2cabf72f91 100644 --- a/sql-workbench/CONTRIBUTING.md +++ b/sql-workbench/CONTRIBUTING.md @@ -11,7 +11,7 @@ information to effectively respond to your bug report or contribution. We welcome you to use the GitHub issue tracker to report bugs or suggest features. -When filing an issue, please check [existing open](https://github.com/opendistro-for-elasticsearch/sql-workbench/issues), or [recently closed](https://github.com/opendistro-for-elasticsearch/sql-workbench/issues?q=is%3Aissue+is%3Aclosed), issues to make sure somebody else hasn't already +When filing an issue, please check [existing open](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aissue+is%3Aclosed+label%3AWorkbench), or [recently closed](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aissue+is%3Aclosed+label%3AWorkbench), issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can. 
Details like these are incredibly useful: * A reproducible test case or series of steps @@ -41,7 +41,7 @@ GitHub provides additional document on [forking a repository](https://help.githu ## Finding contributions to work on -Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any issue tagged ["good first issue"](https://github.com/opendistro-for-elasticsearch/sql-workbench/issues?q=is%3Aopen+label%3A%22help+wanted%22+label%3A%22good+first+issue%22) is a great place to start. +Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any issue tagged ["good first issue"](https://github.com/opendistro-for-elasticsearch/sql/issues?q=is%3Aopen+label%3A%22help+wanted%22+label%3A%22good+first+issue%22+label%3AWorkbench) is a great place to start. ## Code of Conduct diff --git a/sql-workbench/README.md b/sql-workbench/README.md index a2ff72405a..c711be0e46 100644 --- a/sql-workbench/README.md +++ b/sql-workbench/README.md @@ -17,9 +17,16 @@ Please see our technical [documentation](https://opendistro.github.io/for-elasti See the [Kibana contributing guide](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#setting-up-your-development-environment) for more instructions on setting up your development environment. 1. Change your node version to the version specified in `.node-version` inside the Kibana root directory. -1. cd into `plugins` directory in the Kibana source code directory. +1. cd into the Kibana source code directory. 1. Check out this package from version control into the `plugins` directory. -1. Run `yarn kbn bootstrap` inside `kibana/plugins/sql-workbench`. 
+``` +git clone git@github.com:opendistro-for-elasticsearch/sql.git plugins --no-checkout +cd plugins +echo 'sql-workbench/*' >> .git/info/sparse-checkout +git config core.sparseCheckout true +git checkout master +``` +6. Run `yarn kbn bootstrap` inside `kibana/plugins/sql-workbench`. Ultimately, your directory structure should look like this: @@ -56,7 +63,7 @@ Example output: `./build/opendistro-sql-workbench-*.zip` ## Bugs, Enhancements or Questions -Please file an issue to report any bugs you may find, enhancements you may need or questions you may have [here](https://github.com/opendistro-for-elasticsearch/sql-workbench/issues). +Please file an issue to report any bugs you may find, enhancements you may need or questions you may have [here](https://github.com/opendistro-for-elasticsearch/sql/issues). ## License diff --git a/sql-workbench/index.js b/sql-workbench/index.js index 086e1eee20..7366d0db94 100644 --- a/sql-workbench/index.js +++ b/sql-workbench/index.js @@ -35,6 +35,7 @@ export default function (kibana) { description: 'SQL Workbench', main: 'plugins/' + PLUGIN_NAME + '/app', icon:'plugins/' + PLUGIN_NAME + '/icons/sql.svg', + order: 9050, category: DEFAULT_APP_CATEGORIES.kibana, }, styleSheetPaths: [resolve(__dirname, 'public/app.scss')].find(p => existsSync(p)) diff --git a/sql-workbench/package.json b/sql-workbench/package.json index 143539facf..1b3578b135 100644 --- a/sql-workbench/package.json +++ b/sql-workbench/package.json @@ -4,14 +4,14 @@ "description": "SQL Workbench", "main": "index.js", "license": "Apache-2.0", - "homepage": "https://github.com/opendistro-for-elasticsearch/sql-workbench", + "homepage": "https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-workbench", "kibana": { "version": "7.8.0", "templateVersion": "6.3.3" }, "repository": { "type": "git", - "url": "https://github.com/opendistro-for-elasticsearch/sql-workbench" + "url": "https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-workbench" }, 
"scripts": { "preinstall": "node ../../preinstall_check", @@ -25,7 +25,7 @@ }, "dependencies": { "brace": "0.11.1", - "lodash": "^4.17.15", + "lodash": "^4.17.19", "react-dom": "^16.3.0", "react-double-scrollbar": "^0.0.15", "node": "^14.0.0" diff --git a/sql-workbench/yarn.lock b/sql-workbench/yarn.lock index d4d112bf24..f9e478d8b7 100644 --- a/sql-workbench/yarn.lock +++ b/sql-workbench/yarn.lock @@ -2,367 +2,366 @@ # yarn lockfile v1 -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.3.tgz#324bcfd8d35cd3d47dae18cde63d752086435e9a" - integrity sha512-fDx9eNW0qz0WkUeqL6tXEXzVlPh6Y5aCDEZesl0xBGA8ndRukX91Uk44ZqnkECp01NAZUdCAl+aiQNGi0k88Eg== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" + integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== dependencies: - "@babel/highlight" "^7.10.3" + "@babel/highlight" "^7.10.4" -"@babel/compat-data@^7.10.1", "@babel/compat-data@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.10.3.tgz#9af3e033f36e8e2d6e47570db91e64a846f5d382" - integrity sha512-BDIfJ9uNZuI0LajPfoYV28lX8kyCPMHY6uY4WH1lJdcicmAfxCK5ASzaeV0D/wsUaRH/cLk+amuxtC37sZ8TUg== +"@babel/compat-data@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.10.5.tgz#d38425e67ea96b1480a3f50404d1bf85676301a6" + integrity sha512-mPVoWNzIpYJHbWje0if7Ck36bpbtTvIxOi9+6WSK9wjGEXearAqlwBoTQvVjsAY2VIwgcs8V940geY3okzRCEw== dependencies: browserslist "^4.12.0" invariant "^2.2.4" semver "^5.5.0" "@babel/core@^7.1.0", "@babel/core@^7.7.5", "@babel/core@^7.9.0": - version "7.10.3" - resolved 
"https://registry.yarnpkg.com/@babel/core/-/core-7.10.3.tgz#73b0e8ddeec1e3fdd7a2de587a60e17c440ec77e" - integrity sha512-5YqWxYE3pyhIi84L84YcwjeEgS+fa7ZjK6IBVGTjDVfm64njkR2lfDhVR5OudLk8x2GK59YoSyVv+L/03k1q9w== - dependencies: - "@babel/code-frame" "^7.10.3" - "@babel/generator" "^7.10.3" - "@babel/helper-module-transforms" "^7.10.1" - "@babel/helpers" "^7.10.1" - "@babel/parser" "^7.10.3" - "@babel/template" "^7.10.3" - "@babel/traverse" "^7.10.3" - "@babel/types" "^7.10.3" + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.10.5.tgz#1f15e2cca8ad9a1d78a38ddba612f5e7cdbbd330" + integrity sha512-O34LQooYVDXPl7QWCdW9p4NR+QlzOr7xShPPJz8GsuCU3/8ua/wqTr7gmnxXv+WBESiGU/G5s16i6tUvHkNb+w== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.10.5" + "@babel/helper-module-transforms" "^7.10.5" + "@babel/helpers" "^7.10.4" + "@babel/parser" "^7.10.5" + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.10.5" + "@babel/types" "^7.10.5" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.1" json5 "^2.1.2" - lodash "^4.17.13" + lodash "^4.17.19" resolve "^1.3.2" semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.10.3.tgz#32b9a0d963a71d7a54f5f6c15659c3dbc2a523a5" - integrity sha512-drt8MUHbEqRzNR0xnF8nMehbY11b1SDkRw03PSNH/3Rb2Z35oxkddVSi3rcaak0YJQ86PCuE7Qx1jSFhbLNBMA== +"@babel/generator@^7.10.5": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.10.5.tgz#1b903554bc8c583ee8d25f1e8969732e6b829a69" + integrity sha512-3vXxr3FEW7E7lJZiWQ3bM4+v/Vyr9C+hpolQ8BGFr9Y8Ri2tFLWTixmwKBafDujO1WVah4fhZBeU1bieKdghig== dependencies: - "@babel/types" "^7.10.3" + "@babel/types" "^7.10.5" jsesc "^2.5.1" - lodash "^4.17.13" source-map "^0.5.0" -"@babel/helper-annotate-as-pure@^7.10.1": - version "7.10.1" - resolved 
"https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.10.1.tgz#f6d08acc6f70bbd59b436262553fb2e259a1a268" - integrity sha512-ewp3rvJEwLaHgyWGe4wQssC2vjks3E80WiUe2BpMb0KhreTjMROCbxXcEovTrbeGVdQct5VjQfrv9EgC+xMzCw== +"@babel/helper-annotate-as-pure@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.10.4.tgz#5bf0d495a3f757ac3bda48b5bf3b3ba309c72ba3" + integrity sha512-XQlqKQP4vXFB7BN8fEEerrmYvHp3fK/rBkRFz9jaJbzK0B1DSfej9Kc7ZzE8Z/OnId1jpJdNAZ3BFQjWG68rcA== dependencies: - "@babel/types" "^7.10.1" + "@babel/types" "^7.10.4" -"@babel/helper-builder-binary-assignment-operator-visitor@^7.10.1": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.10.3.tgz#4e9012d6701bef0030348d7f9c808209bd3e8687" - integrity sha512-lo4XXRnBlU6eRM92FkiZxpo1xFLmv3VsPFk61zJKMm7XYJfwqXHsYJTY6agoc4a3L8QPw1HqWehO18coZgbT6A== +"@babel/helper-builder-binary-assignment-operator-visitor@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.10.4.tgz#bb0b75f31bf98cbf9ff143c1ae578b87274ae1a3" + integrity sha512-L0zGlFrGWZK4PbT8AszSfLTM5sDU1+Az/En9VrdT8/LmEiJt4zXt+Jve9DCAnQcbqDhCI+29y/L93mrDzddCcg== dependencies: - "@babel/helper-explode-assignable-expression" "^7.10.3" - "@babel/types" "^7.10.3" + "@babel/helper-explode-assignable-expression" "^7.10.4" + "@babel/types" "^7.10.4" -"@babel/helper-builder-react-jsx-experimental@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-react-jsx-experimental/-/helper-builder-react-jsx-experimental-7.10.1.tgz#9a7d58ad184d3ac3bafb1a452cec2bad7e4a0bc8" - integrity sha512-irQJ8kpQUV3JasXPSFQ+LCCtJSc5ceZrPFVj6TElR6XCHssi3jV8ch3odIrNtjJFRZZVbrOEfJMI79TPU/h1pQ== 
+"@babel/helper-builder-react-jsx-experimental@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-react-jsx-experimental/-/helper-builder-react-jsx-experimental-7.10.5.tgz#f35e956a19955ff08c1258e44a515a6d6248646b" + integrity sha512-Buewnx6M4ttG+NLkKyt7baQn7ScC/Td+e99G914fRU8fGIUivDDgVIQeDHFa5e4CRSJQt58WpNHhsAZgtzVhsg== dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/helper-module-imports" "^7.10.1" - "@babel/types" "^7.10.1" + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-module-imports" "^7.10.4" + "@babel/types" "^7.10.5" -"@babel/helper-builder-react-jsx@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.10.3.tgz#62c4b7bb381153a0a5f8d83189b94b9fb5384fc5" - integrity sha512-vkxmuFvmovtqTZknyMGj9+uQAZzz5Z9mrbnkJnPkaYGfKTaSsYcjQdXP0lgrWLVh8wU6bCjOmXOpx+kqUi+S5Q== +"@babel/helper-builder-react-jsx@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.10.4.tgz#8095cddbff858e6fa9c326daee54a2f2732c1d5d" + integrity sha512-5nPcIZ7+KKDxT1427oBivl9V9YTal7qk0diccnh7RrcgrT/pGFOjgGw1dgryyx1GvHEpXVfoDF6Ak3rTiWh8Rg== dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/types" "^7.10.3" + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/types" "^7.10.4" -"@babel/helper-compilation-targets@^7.10.2": - version "7.10.2" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.10.2.tgz#a17d9723b6e2c750299d2a14d4637c76936d8285" - integrity sha512-hYgOhF4To2UTB4LTaZepN/4Pl9LD4gfbJx8A34mqoluT8TLbof1mhUlYuNWTEebONa8+UlCC4X0TEXu7AOUyGA== +"@babel/helper-compilation-targets@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.10.4.tgz#804ae8e3f04376607cc791b9d47d540276332bd2" + integrity 
sha512-a3rYhlsGV0UHNDvrtOXBg8/OpfV0OKTkxKPzIplS1zpx7CygDcWWxckxZeDd3gzPzC4kUT0A4nVFDK0wGMh4MQ== dependencies: - "@babel/compat-data" "^7.10.1" + "@babel/compat-data" "^7.10.4" browserslist "^4.12.0" invariant "^2.2.4" levenary "^1.1.1" semver "^5.5.0" -"@babel/helper-create-class-features-plugin@^7.10.1", "@babel/helper-create-class-features-plugin@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.10.3.tgz#2783daa6866822e3d5ed119163b50f0fc3ae4b35" - integrity sha512-iRT9VwqtdFmv7UheJWthGc/h2s7MqoweBF9RUj77NFZsg9VfISvBTum3k6coAhJ8RWv2tj3yUjA03HxPd0vfpQ== - dependencies: - "@babel/helper-function-name" "^7.10.3" - "@babel/helper-member-expression-to-functions" "^7.10.3" - "@babel/helper-optimise-call-expression" "^7.10.3" - "@babel/helper-plugin-utils" "^7.10.3" - "@babel/helper-replace-supers" "^7.10.1" - "@babel/helper-split-export-declaration" "^7.10.1" - -"@babel/helper-create-regexp-features-plugin@^7.10.1", "@babel/helper-create-regexp-features-plugin@^7.8.3": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.10.1.tgz#1b8feeab1594cbcfbf3ab5a3bbcabac0468efdbd" - integrity sha512-Rx4rHS0pVuJn5pJOqaqcZR4XSgeF9G/pO/79t+4r7380tXFJdzImFnxMU19f83wjSrmKHq6myrM10pFHTGzkUA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/helper-regex" "^7.10.1" +"@babel/helper-create-class-features-plugin@^7.10.4", "@babel/helper-create-class-features-plugin@^7.10.5": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.10.5.tgz#9f61446ba80e8240b0a5c85c6fdac8459d6f259d" + integrity sha512-0nkdeijB7VlZoLT3r/mY3bUkw3T8WG/hNw+FATs/6+pG2039IJWjTYL0VTISqsNHMUTEnwbVnc89WIJX9Qed0A== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-member-expression-to-functions" 
"^7.10.5" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" + +"@babel/helper-create-regexp-features-plugin@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.10.4.tgz#fdd60d88524659a0b6959c0579925e425714f3b8" + integrity sha512-2/hu58IEPKeoLF45DBwx3XFqsbCXmkdAay4spVr2x0jYgRxrSNp+ePwvSsy9g6YSaNDcKIQVPXk1Ov8S2edk2g== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-regex" "^7.10.4" regexpu-core "^4.7.0" -"@babel/helper-define-map@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.10.3.tgz#d27120a5e57c84727b30944549b2dfeca62401a8" - integrity sha512-bxRzDi4Sin/k0drWCczppOhov1sBSdBvXJObM1NLHQzjhXhwRtn7aRWGvLJWCYbuu2qUk3EKs6Ci9C9ps8XokQ== - dependencies: - "@babel/helper-function-name" "^7.10.3" - "@babel/types" "^7.10.3" - lodash "^4.17.13" - -"@babel/helper-explode-assignable-expression@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.10.3.tgz#9dc14f0cfa2833ea830a9c8a1c742b6e7461b05e" - integrity sha512-0nKcR64XrOC3lsl+uhD15cwxPvaB6QKUDlD84OT9C3myRbhJqTMYir69/RWItUvHpharv0eJ/wk7fl34ONSwZw== - dependencies: - "@babel/traverse" "^7.10.3" - "@babel/types" "^7.10.3" - -"@babel/helper-function-name@^7.10.1", "@babel/helper-function-name@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.3.tgz#79316cd75a9fa25ba9787ff54544307ed444f197" - integrity sha512-FvSj2aiOd8zbeqijjgqdMDSyxsGHaMt5Tr0XjQsGKHD3/1FP3wksjnLAWzxw7lvXiej8W1Jt47SKTZ6upQNiRw== +"@babel/helper-define-map@^7.10.4": + version "7.10.5" + resolved 
"https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.10.5.tgz#b53c10db78a640800152692b13393147acb9bb30" + integrity sha512-fMw4kgFB720aQFXSVaXr79pjjcW5puTCM16+rECJ/plGS+zByelE8l9nCpV1GibxTnFVmUuYG9U8wYfQHdzOEQ== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/types" "^7.10.5" + lodash "^4.17.19" + +"@babel/helper-explode-assignable-expression@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.10.4.tgz#40a1cd917bff1288f699a94a75b37a1a2dbd8c7c" + integrity sha512-4K71RyRQNPRrR85sr5QY4X3VwG4wtVoXZB9+L3r1Gp38DhELyHCtovqydRi7c1Ovb17eRGiQ/FD5s8JdU0Uy5A== + dependencies: + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-function-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz#d2d3b20c59ad8c47112fa7d2a94bc09d5ef82f1a" + integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ== dependencies: - "@babel/helper-get-function-arity" "^7.10.3" - "@babel/template" "^7.10.3" - "@babel/types" "^7.10.3" - -"@babel/helper-get-function-arity@^7.10.1", "@babel/helper-get-function-arity@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.3.tgz#3a28f7b28ccc7719eacd9223b659fdf162e4c45e" - integrity sha512-iUD/gFsR+M6uiy69JA6fzM5seno8oE85IYZdbVVEuQaZlEzMO2MXblh+KSPJgsZAUx0EEbWXU0yJaW7C9CdAVg== - dependencies: - "@babel/types" "^7.10.3" - -"@babel/helper-hoist-variables@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.10.3.tgz#d554f52baf1657ffbd7e5137311abc993bb3f068" - integrity sha512-9JyafKoBt5h20Yv1+BXQMdcXXavozI1vt401KBiRc2qzUepbVnd7ogVNymY1xkQN9fekGwfxtotH2Yf5xsGzgg== - dependencies: - "@babel/types" "^7.10.3" - 
-"@babel/helper-member-expression-to-functions@^7.10.1", "@babel/helper-member-expression-to-functions@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.10.3.tgz#bc3663ac81ac57c39148fef4c69bf48a77ba8dd6" - integrity sha512-q7+37c4EPLSjNb2NmWOjNwj0+BOyYlssuQ58kHEWk1Z78K5i8vTUsteq78HMieRPQSl/NtpQyJfdjt3qZ5V2vw== - dependencies: - "@babel/types" "^7.10.3" - -"@babel/helper-module-imports@^7.10.1", "@babel/helper-module-imports@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.10.3.tgz#766fa1d57608e53e5676f23ae498ec7a95e1b11a" - integrity sha512-Jtqw5M9pahLSUWA+76nhK9OG8nwYXzhQzVIGFoNaHnXF/r4l7kz4Fl0UAW7B6mqC5myoJiBP5/YQlXQTMfHI9w== - dependencies: - "@babel/types" "^7.10.3" - -"@babel/helper-module-transforms@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.10.1.tgz#24e2f08ee6832c60b157bb0936c86bef7210c622" - integrity sha512-RLHRCAzyJe7Q7sF4oy2cB+kRnU4wDZY/H2xJFGof+M+SJEGhZsb+GFj5j1AD8NiSaVBJ+Pf0/WObiXu/zxWpFg== - dependencies: - "@babel/helper-module-imports" "^7.10.1" - "@babel/helper-replace-supers" "^7.10.1" - "@babel/helper-simple-access" "^7.10.1" - "@babel/helper-split-export-declaration" "^7.10.1" - "@babel/template" "^7.10.1" - "@babel/types" "^7.10.1" - lodash "^4.17.13" - -"@babel/helper-optimise-call-expression@^7.10.1", "@babel/helper-optimise-call-expression@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.10.3.tgz#f53c4b6783093195b0f69330439908841660c530" - integrity sha512-kT2R3VBH/cnSz+yChKpaKRJQJWxdGoc6SjioRId2wkeV3bK0wLLioFpJROrX0U4xr/NmxSSAWT/9Ih5snwIIzg== - dependencies: - "@babel/types" "^7.10.3" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.1", 
"@babel/helper-plugin-utils@^7.10.3", "@babel/helper-plugin-utils@^7.8.0": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.3.tgz#aac45cccf8bc1873b99a85f34bceef3beb5d3244" - integrity sha512-j/+j8NAWUTxOtx4LKHybpSClxHoq6I91DQ/mKgAXn5oNUPIUiGppjPIX3TDtJWPrdfP9Kfl7e4fgVMiQR9VE/g== - -"@babel/helper-regex@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.10.1.tgz#021cf1a7ba99822f993222a001cc3fec83255b96" - integrity sha512-7isHr19RsIJWWLLFn21ubFt223PjQyg1HY7CZEMRr820HttHPpVvrsIN3bUOo44DEfFV4kBXO7Abbn9KTUZV7g== - dependencies: - lodash "^4.17.13" - -"@babel/helper-remap-async-to-generator@^7.10.1", "@babel/helper-remap-async-to-generator@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.10.3.tgz#18564f8a6748be466970195b876e8bba3bccf442" - integrity sha512-sLB7666ARbJUGDO60ZormmhQOyqMX/shKBXZ7fy937s+3ID8gSrneMvKSSb+8xIM5V7Vn6uNVtOY1vIm26XLtA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/helper-wrap-function" "^7.10.1" - "@babel/template" "^7.10.3" - "@babel/traverse" "^7.10.3" - "@babel/types" "^7.10.3" - -"@babel/helper-replace-supers@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.10.1.tgz#ec6859d20c5d8087f6a2dc4e014db7228975f13d" - integrity sha512-SOwJzEfpuQwInzzQJGjGaiG578UYmyi2Xw668klPWV5n07B73S0a9btjLk/52Mlcxa+5AdIYqws1KyXRfMoB7A== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.10.1" - "@babel/helper-optimise-call-expression" "^7.10.1" - "@babel/traverse" "^7.10.1" - "@babel/types" "^7.10.1" - -"@babel/helper-simple-access@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.10.1.tgz#08fb7e22ace9eb8326f7e3920a1c2052f13d851e" - integrity 
sha512-VSWpWzRzn9VtgMJBIWTZ+GP107kZdQ4YplJlCmIrjoLVSi/0upixezHCDG8kpPVTBJpKfxTH01wDhh+jS2zKbw== - dependencies: - "@babel/template" "^7.10.1" - "@babel/types" "^7.10.1" - -"@babel/helper-split-export-declaration@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.10.1.tgz#c6f4be1cbc15e3a868e4c64a17d5d31d754da35f" - integrity sha512-UQ1LVBPrYdbchNhLwj6fetj46BcFwfS4NllJo/1aJsT+1dLTEnXJL0qHqtY7gPzF8S2fXBJamf1biAXV3X077g== - dependencies: - "@babel/types" "^7.10.1" - -"@babel/helper-validator-identifier@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.3.tgz#60d9847f98c4cea1b279e005fdb7c28be5412d15" - integrity sha512-bU8JvtlYpJSBPuj1VUmKpFGaDZuLxASky3LhaKj3bmpSTY6VWooSM8msk+Z0CZoErFye2tlABF6yDkT3FOPAXw== - -"@babel/helper-wrap-function@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.10.1.tgz#956d1310d6696257a7afd47e4c42dfda5dfcedc9" - integrity sha512-C0MzRGteVDn+H32/ZgbAv5r56f2o1fZSA/rj/TYo8JEJNHg+9BdSmKBUND0shxWRztWhjlT2cvHYuynpPsVJwQ== - dependencies: - "@babel/helper-function-name" "^7.10.1" - "@babel/template" "^7.10.1" - "@babel/traverse" "^7.10.1" - "@babel/types" "^7.10.1" - -"@babel/helpers@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.10.1.tgz#a6827b7cb975c9d9cef5fd61d919f60d8844a973" - integrity sha512-muQNHF+IdU6wGgkaJyhhEmI54MOZBKsFfsXFhboz1ybwJ1Kl7IHlbm2a++4jwrmY5UYsgitt5lfqo1wMFcHmyw== - dependencies: - "@babel/template" "^7.10.1" - "@babel/traverse" "^7.10.1" - "@babel/types" "^7.10.1" - -"@babel/highlight@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.3.tgz#c633bb34adf07c5c13156692f5922c81ec53f28d" - integrity 
sha512-Ih9B/u7AtgEnySE2L2F0Xm0GaM729XqqLfHkalTsbjXGyqmf/6M0Cu0WpvqueUlW+xk88BHw9Nkpj49naU+vWw== - dependencies: - "@babel/helper-validator-identifier" "^7.10.3" + "@babel/helper-get-function-arity" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-get-function-arity@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz#98c1cbea0e2332f33f9a4661b8ce1505b2c19ba2" + integrity sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-hoist-variables@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.10.4.tgz#d49b001d1d5a68ca5e6604dda01a6297f7c9381e" + integrity sha512-wljroF5PgCk2juF69kanHVs6vrLwIPNp6DLD+Lrl3hoQ3PpPPikaDRNFA+0t81NOoMt2DL6WW/mdU8k4k6ZzuA== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-member-expression-to-functions@^7.10.4", "@babel/helper-member-expression-to-functions@^7.10.5": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.10.5.tgz#172f56e7a63e78112f3a04055f24365af702e7ee" + integrity sha512-HiqJpYD5+WopCXIAbQDG0zye5XYVvcO9w/DHp5GsaGkRUaamLj2bEtu6i8rnGGprAhHM3qidCMgp71HF4endhA== + dependencies: + "@babel/types" "^7.10.5" + +"@babel/helper-module-imports@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.10.4.tgz#4c5c54be04bd31670a7382797d75b9fa2e5b5620" + integrity sha512-nEQJHqYavI217oD9+s5MUBzk6x1IlvoS9WTPfgG43CbMEeStE0v+r+TucWdx8KFGowPGvyOkDT9+7DHedIDnVw== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-module-transforms@^7.10.4", "@babel/helper-module-transforms@^7.10.5": + version "7.10.5" + resolved 
"https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.10.5.tgz#120c271c0b3353673fcdfd8c053db3c544a260d6" + integrity sha512-4P+CWMJ6/j1W915ITJaUkadLObmCRRSC234uctJfn/vHrsLNxsR8dwlcXv9ZhJWzl77awf+mWXSZEKt5t0OnlA== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + "@babel/helper-simple-access" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.5" + lodash "^4.17.19" + +"@babel/helper-optimise-call-expression@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.10.4.tgz#50dc96413d594f995a77905905b05893cd779673" + integrity sha512-n3UGKY4VXwXThEiKrgRAoVPBMqeoPgHVqiHZOanAJCG9nQUL2pLRQirUzl0ioKclHGpGqRgIOkgcIJaIWLpygg== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.8.0": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375" + integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + +"@babel/helper-regex@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.10.5.tgz#32dfbb79899073c415557053a19bd055aae50ae0" + integrity sha512-68kdUAzDrljqBrio7DYAEgCoJHxppJOERHOgOrDN7WjOzP0ZQ1LsSDRXcemzVZaLvjaJsJEESb6qt+znNuENDg== + dependencies: + lodash "^4.17.19" + +"@babel/helper-remap-async-to-generator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.10.4.tgz#fce8bea4e9690bbe923056ded21e54b4e8b68ed5" + integrity sha512-86Lsr6NNw3qTNl+TBcF1oRZMaVzJtbWTyTko+CQL/tvNvcGYEFKbLXDPxtW0HKk3McNOk4KzY55itGWCAGK5tg== + dependencies: + 
"@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-wrap-function" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-replace-supers@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.10.4.tgz#d585cd9388ea06e6031e4cd44b6713cbead9e6cf" + integrity sha512-sPxZfFXocEymYTdVK1UNmFPBN+Hv5mJkLPsYWwGBxZAxaWfFu+xqp7b6qWD0yjNuNL2VKc6L5M18tOXUP7NU0A== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.10.4" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-simple-access@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.10.4.tgz#0f5ccda2945277a2a7a2d3a821e15395edcf3461" + integrity sha512-0fMy72ej/VEvF8ULmX6yb5MtHG4uH4Dbd6I/aHDb/JVg0bbivwt9Wg+h3uMvX+QSFtwr5MeItvazbrc4jtRAXw== + dependencies: + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-split-export-declaration@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.10.4.tgz#2c70576eaa3b5609b24cb99db2888cc3fc4251d1" + integrity sha512-pySBTeoUff56fL5CBU2hWm9TesA4r/rOkI9DyJLvvgz09MB9YtfIYe3iBriVaYNaPe+Alua0vBIOVOLs2buWhg== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-validator-identifier@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2" + integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw== + +"@babel/helper-wrap-function@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.10.4.tgz#8a6f701eab0ff39f765b5a1cfef409990e624b87" + 
integrity sha512-6py45WvEF0MhiLrdxtRjKjufwLL1/ob2qDJgg5JgNdojBAZSAKnAjkyOCNug6n+OBl4VW76XjvgSFTdaMcW0Ug== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helpers@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.10.4.tgz#2abeb0d721aff7c0a97376b9e1f6f65d7a475044" + integrity sha512-L2gX/XeUONeEbI78dXSrJzGdz4GQ+ZTA/aazfUsFaWjSe95kiCuOZ5HsXvkiw3iwF+mFHSRUfJU8t6YavocdXA== + dependencies: + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/highlight@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143" + integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA== + dependencies: + "@babel/helper-validator-identifier" "^7.10.4" chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.10.3", "@babel/parser@^7.7.0": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.3.tgz#7e71d892b0d6e7d04a1af4c3c79d72c1f10f5315" - integrity sha512-oJtNJCMFdIMwXGmx+KxuaD7i3b8uS7TTFYW/FNG2BT8m+fmGHoiPYoH0Pe3gya07WuFmM5FCDIr1x0irkD/hyA== +"@babel/parser@^7.1.0", "@babel/parser@^7.10.4", "@babel/parser@^7.10.5", "@babel/parser@^7.7.0": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.5.tgz#e7c6bf5a7deff957cec9f04b551e2762909d826b" + integrity sha512-wfryxy4bE1UivvQKSQDU4/X6dr+i8bctjUjj8Zyt3DQy7NtPizJXT8M52nqpNKL+nq2PW8lxk4ZqLj0fD4B4hQ== -"@babel/plugin-proposal-async-generator-functions@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.10.3.tgz#5a02453d46e5362e2073c7278beab2e53ad7d939" - integrity 
sha512-WUUWM7YTOudF4jZBAJIW9D7aViYC/Fn0Pln4RIHlQALyno3sXSjqmTA4Zy1TKC2D49RCR8Y/Pn4OIUtEypK3CA== +"@babel/plugin-proposal-async-generator-functions@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.10.5.tgz#3491cabf2f7c179ab820606cec27fed15e0e8558" + integrity sha512-cNMCVezQbrRGvXJwm9fu/1sJj9bHdGAgKodZdLqOQIpfoH3raqmRPBM17+lh7CzhiKRRBrGtZL9WcjxSoGYUSg== dependencies: - "@babel/helper-plugin-utils" "^7.10.3" - "@babel/helper-remap-async-to-generator" "^7.10.3" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-remap-async-to-generator" "^7.10.4" "@babel/plugin-syntax-async-generators" "^7.8.0" -"@babel/plugin-proposal-class-properties@^7.10.1", "@babel/plugin-proposal-class-properties@^7.7.4": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.10.1.tgz#046bc7f6550bb08d9bd1d4f060f5f5a4f1087e01" - integrity sha512-sqdGWgoXlnOdgMXU+9MbhzwFRgxVLeiGBqTrnuS7LC2IBU31wSsESbTUreT2O418obpfPdGUR2GbEufZF1bpqw== +"@babel/plugin-proposal-class-properties@^7.10.4", "@babel/plugin-proposal-class-properties@^7.7.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.10.4.tgz#a33bf632da390a59c7a8c570045d1115cd778807" + integrity sha512-vhwkEROxzcHGNu2mzUC0OFFNXdZ4M23ib8aRRcJSsW8BZK9pQMD7QB7csl97NBbgGZO7ZyHUyKDnxzOaP4IrCg== dependencies: - "@babel/helper-create-class-features-plugin" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-create-class-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-proposal-dynamic-import@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.10.1.tgz#e36979dc1dc3b73f6d6816fc4951da2363488ef0" - integrity 
sha512-Cpc2yUVHTEGPlmiQzXj026kqwjEQAD9I4ZC16uzdbgWgitg/UHKHLffKNCQZ5+y8jpIZPJcKcwsr2HwPh+w3XA== +"@babel/plugin-proposal-dynamic-import@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.10.4.tgz#ba57a26cb98b37741e9d5bca1b8b0ddf8291f17e" + integrity sha512-up6oID1LeidOOASNXgv/CFbgBqTuKJ0cJjz6An5tWD+NVBNlp3VNSBxv2ZdU7SYl3NxJC7agAQDApZusV6uFwQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-dynamic-import" "^7.8.0" -"@babel/plugin-proposal-json-strings@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.10.1.tgz#b1e691ee24c651b5a5e32213222b2379734aff09" - integrity sha512-m8r5BmV+ZLpWPtMY2mOKN7wre6HIO4gfIiV+eOmsnZABNenrt/kzYBwrh+KOfgumSWpnlGs5F70J8afYMSJMBg== +"@babel/plugin-proposal-json-strings@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.10.4.tgz#593e59c63528160233bd321b1aebe0820c2341db" + integrity sha512-fCL7QF0Jo83uy1K0P2YXrfX11tj3lkpN7l4dMv9Y9VkowkhkQDwFHFd8IiwyK5MZjE8UpbgokkgtcReH88Abaw== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings" "^7.8.0" -"@babel/plugin-proposal-nullish-coalescing-operator@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.10.1.tgz#02dca21673842ff2fe763ac253777f235e9bbf78" - integrity sha512-56cI/uHYgL2C8HVuHOuvVowihhX0sxb3nnfVRzUeVHTWmRHTZrKuAh/OBIMggGU/S1g/1D2CRCXqP+3u7vX7iA== +"@babel/plugin-proposal-nullish-coalescing-operator@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.10.4.tgz#02a7e961fc32e6d5b2db0649e01bf80ddee7e04a" + integrity sha512-wq5n1M3ZUlHl9sqT2ok1T2/MTt6AXE0e1Lz4WzWBr95LsAZ5qDXe4KnFuauYyEyLiohvXFMdbsOTMyLZs91Zlw== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" -"@babel/plugin-proposal-numeric-separator@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.10.1.tgz#a9a38bc34f78bdfd981e791c27c6fdcec478c123" - integrity sha512-jjfym4N9HtCiNfyyLAVD8WqPYeHUrw4ihxuAynWj6zzp2gf9Ey2f7ImhFm6ikB3CLf5Z/zmcJDri6B4+9j9RsA== +"@babel/plugin-proposal-numeric-separator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.10.4.tgz#ce1590ff0a65ad12970a609d78855e9a4c1aef06" + integrity sha512-73/G7QoRoeNkLZFxsoCCvlg4ezE4eM+57PnOqgaPOozd5myfj7p0muD1mRVJvbUWbOzD+q3No2bWbaKy+DJ8DA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/plugin-syntax-numeric-separator" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" -"@babel/plugin-proposal-object-rest-spread@^7.10.3", "@babel/plugin-proposal-object-rest-spread@^7.7.4": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.10.3.tgz#b8d0d22f70afa34ad84b7a200ff772f9b9fce474" - integrity sha512-ZZh5leCIlH9lni5bU/wB/UcjtcVLgR8gc+FAgW2OOY+m9h1II3ItTO1/cewNUcsIDZSYcSaz/rYVls+Fb0ExVQ== +"@babel/plugin-proposal-object-rest-spread@^7.10.4", "@babel/plugin-proposal-object-rest-spread@^7.7.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.10.4.tgz#50129ac216b9a6a55b3853fdd923e74bf553a4c0" + integrity sha512-6vh4SqRuLLarjgeOf4EaROJAHjvu9Gl+/346PbDH9yWbJyfnJ/ah3jmYKYtswEyCoWZiidvVHjHshd4WgjB9BA== dependencies: - "@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread" "^7.8.0" - "@babel/plugin-transform-parameters" "^7.10.1" + "@babel/plugin-transform-parameters" "^7.10.4" -"@babel/plugin-proposal-optional-catch-binding@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.10.1.tgz#c9f86d99305f9fa531b568ff5ab8c964b8b223d2" - integrity sha512-VqExgeE62YBqI3ogkGoOJp1R6u12DFZjqwJhqtKc2o5m1YTUuUWnos7bZQFBhwkxIFpWYJ7uB75U7VAPPiKETA== +"@babel/plugin-proposal-optional-catch-binding@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.10.4.tgz#31c938309d24a78a49d68fdabffaa863758554dd" + integrity sha512-LflT6nPh+GK2MnFiKDyLiqSqVHkQnVf7hdoAvyTnnKj9xB3docGRsdPuxp6qqqW19ifK3xgc9U5/FwrSaCNX5g== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" -"@babel/plugin-proposal-optional-chaining@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.10.3.tgz#9a726f94622b653c0a3a7a59cdce94730f526f7c" - integrity sha512-yyG3n9dJ1vZ6v5sfmIlMMZ8azQoqx/5/nZTSWX1td6L1H1bsjzA8TInDChpafCZiJkeOFzp/PtrfigAQXxI1Ng== +"@babel/plugin-proposal-optional-chaining@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.10.4.tgz#750f1255e930a1f82d8cdde45031f81a0d0adff7" + 
integrity sha512-ZIhQIEeavTgouyMSdZRap4VPPHqJJ3NEs2cuHs5p0erH+iz6khB0qfgU8g7UuJkG88+fBMy23ZiU+nuHvekJeQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-optional-chaining" "^7.8.0" -"@babel/plugin-proposal-private-methods@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.10.1.tgz#ed85e8058ab0fe309c3f448e5e1b73ca89cdb598" - integrity sha512-RZecFFJjDiQ2z6maFprLgrdnm0OzoC23Mx89xf1CcEsxmHuzuXOdniEuI+S3v7vjQG4F5sa6YtUp+19sZuSxHg== +"@babel/plugin-proposal-private-methods@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.10.4.tgz#b160d972b8fdba5c7d111a145fc8c421fc2a6909" + integrity sha512-wh5GJleuI8k3emgTg5KkJK6kHNsGEr0uBTDBuQUBJwckk9xs1ez79ioheEVVxMLyPscB0LfkbVHslQqIzWV6Bw== dependencies: - "@babel/helper-create-class-features-plugin" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-create-class-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-proposal-unicode-property-regex@^7.10.1", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.10.1.tgz#dc04feb25e2dd70c12b05d680190e138fa2c0c6f" - integrity sha512-JjfngYRvwmPwmnbRZyNiPFI8zxCZb8euzbCG/LxyKdeTb59tVciKo9GK9bi6JYKInk1H11Dq9j/zRqIH4KigfQ== +"@babel/plugin-proposal-unicode-property-regex@^7.10.4", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.10.4.tgz#4483cda53041ce3413b7fe2f00022665ddfaa75d" + integrity sha512-H+3fOgPnEXFL9zGYtKQe4IDOPKYlZdF1kqFDQRRb8PK4B8af1vAGK04tF5iQAAsui+mHNBQSAtd2/ndEDe9wuA== 
dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-async-generators@^7.8.0", "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" @@ -378,12 +377,12 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-class-properties@^7.10.1", "@babel/plugin-syntax-class-properties@^7.8.3": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.10.1.tgz#d5bc0645913df5b17ad7eda0fa2308330bde34c5" - integrity sha512-Gf2Yx/iRs1JREDtVZ56OrjjgFHCaldpTnuy9BHla10qyVT3YkIIGEtoDWhyop0ksu1GvNjHIoYRBqm3zoR1jyQ== +"@babel/plugin-syntax-class-properties@^7.10.4", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.10.4.tgz#6644e6a0baa55a61f9e3231f6c9eeb6ee46c124c" + integrity sha512-GCSBF7iUle6rNugfURwNmCGG3Z/2+opxAMLs1nND4bhEG5PuxTIggDBoeYYSujAlLtsupzOHYJQgPS3pivwXIA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-dynamic-import@^7.8.0": version "7.8.3" @@ -393,11 +392,11 @@ "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.1.tgz#3e59120ed8b3c2ccc5abb1cfc7aaa3ea01cd36b6" - integrity sha512-ypC4jwfIVF72og0dgvEcFRdOM2V9Qm1tu7RGmdZOlhsccyK0wisXmMObGuWEOd5jQ+K9wcIgSNftCpk2vkjUfQ== + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity 
sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings@^7.8.0", "@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" @@ -406,19 +405,19 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-jsx@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.10.1.tgz#0ae371134a42b91d5418feb3c8c8d43e1565d2da" - integrity sha512-+OxyOArpVFXQeXKLO9o+r2I4dIoVoy6+Uu0vKELrlweDM3QJADZj+Z+5ERansZqIZBcLj42vHnDI8Rz9BnRIuQ== +"@babel/plugin-syntax-jsx@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.10.4.tgz#39abaae3cbf710c4373d8429484e6ba21340166c" + integrity sha512-KCg9mio9jwiARCB7WAcQ7Y1q+qicILjoK8LP/VkPkEKaf5dkaZZK1EcTe91a3JJlZ3qy6L5s9X52boEYi8DM9g== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.1.tgz#fffee77b4934ce77f3b427649ecdddbec1958550" - integrity sha512-XyHIFa9kdrgJS91CUH+ccPVTnJShr8nLGc5bG2IhGXv5p1Rd+8BleGE5yzIg2Nc1QZAdHDa0Qp4m6066OL96Iw== + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0", "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": version "7.8.3" @@ 
-427,12 +426,12 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-numeric-separator@^7.10.1", "@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.1.tgz#25761ee7410bc8cf97327ba741ee94e4a61b7d99" - integrity sha512-uTd0OsHrpe3tH5gRPTxG8Voh99/WCU78vIm5NMRYPAqC8lR4vajt6KkCAknCHrx24vkPdd/05yfdGSB4EIY2mg== +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" @@ -455,404 +454,403 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-top-level-await@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.10.1.tgz#8b8733f8c57397b3eaa47ddba8841586dcaef362" - integrity sha512-hgA5RYkmZm8FTFT3yu2N9Bx7yVVOKYT6yEdXXo6j2JTm0wNxgqaGeQVaSHRjhfnQbX91DtjFB6McRFSlcJH3xQ== +"@babel/plugin-syntax-top-level-await@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.10.4.tgz#4bbeb8917b54fcf768364e0a81f560e33a3ef57d" + integrity sha512-ni1brg4lXEmWyafKr0ccFWkJG0CeMt4WV1oyeBW6EFObF4oOHclbkj5cARxAPQyAQ2UTuplJyK4nfkXIMMFvsQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-typescript@^7.10.1": - version "7.10.1" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.10.1.tgz#5e82bc27bb4202b93b949b029e699db536733810" - integrity sha512-X/d8glkrAtra7CaQGMiGs/OGa6XgUzqPcBXCIGFCpCqnfGlT0Wfbzo/B89xHhnInTaItPK8LALblVXcUOEh95Q== +"@babel/plugin-syntax-typescript@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.10.4.tgz#2f55e770d3501e83af217d782cb7517d7bb34d25" + integrity sha512-oSAEz1YkBCAKr5Yiq8/BNtvSAPwkp/IyUnwZogd8p+F0RuYQQrLeRUzIQhueQTTBy/F+a40uS7OFKxnkRvmvFQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-arrow-functions@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.10.1.tgz#cb5ee3a36f0863c06ead0b409b4cc43a889b295b" - integrity sha512-6AZHgFJKP3DJX0eCNJj01RpytUa3SOGawIxweHkNX2L6PYikOZmoh5B0d7hIHaIgveMjX990IAa/xK7jRTN8OA== +"@babel/plugin-transform-arrow-functions@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.10.4.tgz#e22960d77e697c74f41c501d44d73dbf8a6a64cd" + integrity sha512-9J/oD1jV0ZCBcgnoFWFq1vJd4msoKb/TCpGNFyyLt0zABdcvgK3aYikZ8HjzB14c26bc7E3Q1yugpwGy2aTPNA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-async-to-generator@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.10.1.tgz#e5153eb1a3e028f79194ed8a7a4bf55f862b2062" - integrity sha512-XCgYjJ8TY2slj6SReBUyamJn3k2JLUIiiR5b6t1mNCMSvv7yx+jJpaewakikp0uWFQSF7ChPPoe3dHmXLpISkg== +"@babel/plugin-transform-async-to-generator@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.10.4.tgz#41a5017e49eb6f3cda9392a51eef29405b245a37" + integrity sha512-F6nREOan7J5UXTLsDsZG3DXmZSVofr2tGNwfdrVwkDWHfQckbQXnXSPfD7iO+c/2HGqycwyLST3DnZ16n+cBJQ== dependencies: - "@babel/helper-module-imports" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/helper-remap-async-to-generator" "^7.10.1" + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-remap-async-to-generator" "^7.10.4" -"@babel/plugin-transform-block-scoped-functions@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.10.1.tgz#146856e756d54b20fff14b819456b3e01820b85d" - integrity sha512-B7K15Xp8lv0sOJrdVAoukKlxP9N59HS48V1J3U/JGj+Ad+MHq+am6xJVs85AgXrQn4LV8vaYFOB+pr/yIuzW8Q== +"@babel/plugin-transform-block-scoped-functions@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.10.4.tgz#1afa595744f75e43a91af73b0d998ecfe4ebc2e8" + integrity sha512-WzXDarQXYYfjaV1szJvN3AD7rZgZzC1JtjJZ8dMHUyiK8mxPRahynp14zzNjU3VkPqPsO38CzxiWO1c9ARZ8JA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-block-scoping@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.10.1.tgz#47092d89ca345811451cd0dc5d91605982705d5e" - integrity sha512-8bpWG6TtF5akdhIm/uWTyjHqENpy13Fx8chg7pFH875aNLwX8JxIxqm08gmAT+Whe6AOmaTeLPe7dpLbXt+xUw== +"@babel/plugin-transform-block-scoping@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.10.5.tgz#b81b8aafefbfe68f0f65f7ef397b9ece68a6037d" + integrity 
sha512-6Ycw3hjpQti0qssQcA6AMSFDHeNJ++R6dIMnpRqUjFeBBTmTDPa8zgF90OVfTvAo11mXZTlVUViY1g8ffrURLg== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - lodash "^4.17.13" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-classes@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.10.3.tgz#8d9a656bc3d01f3ff69e1fccb354b0f9d72ac544" - integrity sha512-irEX0ChJLaZVC7FvvRoSIxJlmk0IczFLcwaRXUArBKYHCHbOhe57aG8q3uw/fJsoSXvZhjRX960hyeAGlVBXZw== +"@babel/plugin-transform-classes@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.10.4.tgz#405136af2b3e218bc4a1926228bc917ab1a0adc7" + integrity sha512-2oZ9qLjt161dn1ZE0Ms66xBncQH4In8Sqw1YWgBUZuGVJJS5c0OFZXL6dP2MRHrkU/eKhWg8CzFJhRQl50rQxA== dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/helper-define-map" "^7.10.3" - "@babel/helper-function-name" "^7.10.3" - "@babel/helper-optimise-call-expression" "^7.10.3" - "@babel/helper-plugin-utils" "^7.10.3" - "@babel/helper-replace-supers" "^7.10.1" - "@babel/helper-split-export-declaration" "^7.10.1" + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-define-map" "^7.10.4" + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" globals "^11.1.0" -"@babel/plugin-transform-computed-properties@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.10.3.tgz#d3aa6eef67cb967150f76faff20f0abbf553757b" - integrity sha512-GWzhaBOsdbjVFav96drOz7FzrcEW6AP5nax0gLIpstiFaI3LOb2tAg06TimaWU6YKOfUACK3FVrxPJ4GSc5TgA== +"@babel/plugin-transform-computed-properties@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.10.4.tgz#9ded83a816e82ded28d52d4b4ecbdd810cdfc0eb" + integrity sha512-JFwVDXcP/hM/TbyzGq3l/XWGut7p46Z3QvqFMXTfk6/09m7xZHJUN9xHfsv7vqqD4YnfI5ueYdSJtXqqBLyjBw== dependencies: - "@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-destructuring@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.10.1.tgz#abd58e51337815ca3a22a336b85f62b998e71907" - integrity sha512-V/nUc4yGWG71OhaTH705pU8ZSdM6c1KmmLP8ys59oOYbT7RpMYAR3MsVOt6OHL0WzG7BlTU076va9fjJyYzJMA== +"@babel/plugin-transform-destructuring@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.10.4.tgz#70ddd2b3d1bea83d01509e9bb25ddb3a74fc85e5" + integrity sha512-+WmfvyfsyF603iPa6825mq6Qrb7uLjTOsa3XOFzlYcYDHSS4QmpOWOL0NNBY5qMbvrcf3tq0Cw+v4lxswOBpgA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-dotall-regex@^7.10.1", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.10.1.tgz#920b9fec2d78bb57ebb64a644d5c2ba67cc104ee" - integrity sha512-19VIMsD1dp02RvduFUmfzj8uknaO3uiHHF0s3E1OHnVsNj8oge8EQ5RzHRbJjGSetRnkEuBYO7TG1M5kKjGLOA== +"@babel/plugin-transform-dotall-regex@^7.10.4", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.10.4.tgz#469c2062105c1eb6a040eaf4fac4b488078395ee" + integrity sha512-ZEAVvUTCMlMFAbASYSVQoxIbHm2OkG2MseW6bV2JjIygOjdVv8tuxrCTzj1+Rynh7ODb8GivUy7dzEXzEhuPaA== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.1" - 
"@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-duplicate-keys@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.10.1.tgz#c900a793beb096bc9d4d0a9d0cde19518ffc83b9" - integrity sha512-wIEpkX4QvX8Mo9W6XF3EdGttrIPZWozHfEaDTU0WJD/TDnXMvdDh30mzUl/9qWhnf7naicYartcEfUghTCSNpA== +"@babel/plugin-transform-duplicate-keys@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.10.4.tgz#697e50c9fee14380fe843d1f306b295617431e47" + integrity sha512-GL0/fJnmgMclHiBTTWXNlYjYsA7rDrtsazHG6mglaGSTh0KsrW04qml+Bbz9FL0LcJIRwBWL5ZqlNHKTkU3xAA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-exponentiation-operator@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.10.1.tgz#279c3116756a60dd6e6f5e488ba7957db9c59eb3" - integrity sha512-lr/przdAbpEA2BUzRvjXdEDLrArGRRPwbaF9rvayuHRvdQ7lUTTkZnhZrJ4LE2jvgMRFF4f0YuPQ20vhiPYxtA== +"@babel/plugin-transform-exponentiation-operator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.10.4.tgz#5ae338c57f8cf4001bdb35607ae66b92d665af2e" + integrity sha512-S5HgLVgkBcRdyQAHbKj+7KyuWx8C6t5oETmUuwz1pt3WTWJhsUV0WIIXuVvfXMxl/QQyHKlSCNNtaIamG8fysw== dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-for-of@^7.10.1": - version "7.10.1" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.10.1.tgz#ff01119784eb0ee32258e8646157ba2501fcfda5" - integrity sha512-US8KCuxfQcn0LwSCMWMma8M2R5mAjJGsmoCBVwlMygvmDUMkTCykc84IqN1M7t+agSfOmLYTInLCHJM+RUoz+w== +"@babel/plugin-transform-for-of@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.10.4.tgz#c08892e8819d3a5db29031b115af511dbbfebae9" + integrity sha512-ItdQfAzu9AlEqmusA/65TqJ79eRcgGmpPPFvBnGILXZH975G0LNjP1yjHvGgfuCxqrPPueXOPe+FsvxmxKiHHQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-function-name@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.10.1.tgz#4ed46fd6e1d8fde2a2ec7b03c66d853d2c92427d" - integrity sha512-//bsKsKFBJfGd65qSNNh1exBy5Y9gD9ZN+DvrJ8f7HXr4avE5POW6zB7Rj6VnqHV33+0vXWUwJT0wSHubiAQkw== +"@babel/plugin-transform-function-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.10.4.tgz#6a467880e0fc9638514ba369111811ddbe2644b7" + integrity sha512-OcDCq2y5+E0dVD5MagT5X+yTRbcvFjDI2ZVAottGH6tzqjx/LKpgkUepu3hp/u4tZBzxxpNGwLsAvGBvQ2mJzg== dependencies: - "@babel/helper-function-name" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-literals@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.10.1.tgz#5794f8da82846b22e4e6631ea1658bce708eb46a" - integrity sha512-qi0+5qgevz1NHLZroObRm5A+8JJtibb7vdcPQF1KQE12+Y/xxl8coJ+TpPW9iRq+Mhw/NKLjm+5SHtAHCC7lAw== +"@babel/plugin-transform-literals@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.10.4.tgz#9f42ba0841100a135f22712d0e391c462f571f3c" + integrity sha512-Xd/dFSTEVuUWnyZiMu76/InZxLTYilOSr1UlHV+p115Z/Le2Fi1KXkJUYz0b42DfndostYlPub3m8ZTQlMaiqQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-member-expression-literals@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.10.1.tgz#90347cba31bca6f394b3f7bd95d2bbfd9fce2f39" - integrity sha512-UmaWhDokOFT2GcgU6MkHC11i0NQcL63iqeufXWfRy6pUOGYeCGEKhvfFO6Vz70UfYJYHwveg62GS83Rvpxn+NA== +"@babel/plugin-transform-member-expression-literals@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.10.4.tgz#b1ec44fcf195afcb8db2c62cd8e551c881baf8b7" + integrity sha512-0bFOvPyAoTBhtcJLr9VcwZqKmSjFml1iVxvPL0ReomGU53CX53HsM4h2SzckNdkQcHox1bpAqzxBI1Y09LlBSw== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-modules-amd@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.10.1.tgz#65950e8e05797ebd2fe532b96e19fc5482a1d52a" - integrity sha512-31+hnWSFRI4/ACFr1qkboBbrTxoBIzj7qA69qlq8HY8p7+YCzkCT6/TvQ1a4B0z27VeWtAeJd6pr5G04dc1iHw== +"@babel/plugin-transform-modules-amd@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.10.5.tgz#1b9cddaf05d9e88b3aad339cb3e445c4f020a9b1" + integrity sha512-elm5uruNio7CTLFItVC/rIzKLfQ17+fX7EVz5W0TMgIHFo1zY0Ozzx+lgwhL4plzl8OzVn6Qasx5DeEFyoNiRw== dependencies: - "@babel/helper-module-transforms" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + 
"@babel/helper-module-transforms" "^7.10.5" + "@babel/helper-plugin-utils" "^7.10.4" babel-plugin-dynamic-import-node "^2.3.3" -"@babel/plugin-transform-modules-commonjs@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.10.1.tgz#d5ff4b4413ed97ffded99961056e1fb980fb9301" - integrity sha512-AQG4fc3KOah0vdITwt7Gi6hD9BtQP/8bhem7OjbaMoRNCH5Djx42O2vYMfau7QnAzQCa+RJnhJBmFFMGpQEzrg== +"@babel/plugin-transform-modules-commonjs@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.10.4.tgz#66667c3eeda1ebf7896d41f1f16b17105a2fbca0" + integrity sha512-Xj7Uq5o80HDLlW64rVfDBhao6OX89HKUmb+9vWYaLXBZOma4gA6tw4Ni1O5qVDoZWUV0fxMYA0aYzOawz0l+1w== dependencies: - "@babel/helper-module-transforms" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/helper-simple-access" "^7.10.1" + "@babel/helper-module-transforms" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-simple-access" "^7.10.4" babel-plugin-dynamic-import-node "^2.3.3" -"@babel/plugin-transform-modules-systemjs@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.10.3.tgz#004ae727b122b7b146b150d50cba5ffbff4ac56b" - integrity sha512-GWXWQMmE1GH4ALc7YXW56BTh/AlzvDWhUNn9ArFF0+Cz5G8esYlVbXfdyHa1xaD1j+GnBoCeoQNlwtZTVdiG/A== +"@babel/plugin-transform-modules-systemjs@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.10.5.tgz#6270099c854066681bae9e05f87e1b9cadbe8c85" + integrity sha512-f4RLO/OL14/FP1AEbcsWMzpbUz6tssRaeQg11RH1BP/XnPpRoVwgeYViMFacnkaw4k4wjRSjn3ip1Uw9TaXuMw== dependencies: - "@babel/helper-hoist-variables" "^7.10.3" - "@babel/helper-module-transforms" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.3" + 
"@babel/helper-hoist-variables" "^7.10.4" + "@babel/helper-module-transforms" "^7.10.5" + "@babel/helper-plugin-utils" "^7.10.4" babel-plugin-dynamic-import-node "^2.3.3" -"@babel/plugin-transform-modules-umd@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.10.1.tgz#ea080911ffc6eb21840a5197a39ede4ee67b1595" - integrity sha512-EIuiRNMd6GB6ulcYlETnYYfgv4AxqrswghmBRQbWLHZxN4s7mupxzglnHqk9ZiUpDI4eRWewedJJNj67PWOXKA== +"@babel/plugin-transform-modules-umd@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.10.4.tgz#9a8481fe81b824654b3a0b65da3df89f3d21839e" + integrity sha512-mohW5q3uAEt8T45YT7Qc5ws6mWgJAaL/8BfWD9Dodo1A3RKWli8wTS+WiQ/knF+tXlPirW/1/MqzzGfCExKECA== dependencies: - "@babel/helper-module-transforms" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-module-transforms" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-named-capturing-groups-regex@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.10.3.tgz#a4f8444d1c5a46f35834a410285f2c901c007ca6" - integrity sha512-I3EH+RMFyVi8Iy/LekQm948Z4Lz4yKT7rK+vuCAeRm0kTa6Z5W7xuhRxDNJv0FPya/her6AUgrDITb70YHtTvA== +"@babel/plugin-transform-named-capturing-groups-regex@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.10.4.tgz#78b4d978810b6f3bcf03f9e318f2fc0ed41aecb6" + integrity sha512-V6LuOnD31kTkxQPhKiVYzYC/Jgdq53irJC/xBSmqcNcqFGV+PER4l6rU5SH2Vl7bH9mLDHcc0+l9HUOe4RNGKA== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.8.3" + "@babel/helper-create-regexp-features-plugin" "^7.10.4" -"@babel/plugin-transform-new-target@^7.10.1": - version "7.10.1" 
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.10.1.tgz#6ee41a5e648da7632e22b6fb54012e87f612f324" - integrity sha512-MBlzPc1nJvbmO9rPr1fQwXOM2iGut+JC92ku6PbiJMMK7SnQc1rytgpopveE3Evn47gzvGYeCdgfCDbZo0ecUw== +"@babel/plugin-transform-new-target@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.10.4.tgz#9097d753cb7b024cb7381a3b2e52e9513a9c6888" + integrity sha512-YXwWUDAH/J6dlfwqlWsztI2Puz1NtUAubXhOPLQ5gjR/qmQ5U96DY4FQO8At33JN4XPBhrjB8I4eMmLROjjLjw== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-object-super@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.10.1.tgz#2e3016b0adbf262983bf0d5121d676a5ed9c4fde" - integrity sha512-WnnStUDN5GL+wGQrJylrnnVlFhFmeArINIR9gjhSeYyvroGhBrSAXYg/RHsnfzmsa+onJrTJrEClPzgNmmQ4Gw== +"@babel/plugin-transform-object-super@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.10.4.tgz#d7146c4d139433e7a6526f888c667e314a093894" + integrity sha512-5iTw0JkdRdJvr7sY0vHqTpnruUpTea32JHmq/atIWqsnNussbRzjEDyWep8UNztt1B5IusBYg8Irb0bLbiEBCQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/helper-replace-supers" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" -"@babel/plugin-transform-parameters@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.10.1.tgz#b25938a3c5fae0354144a720b07b32766f683ddd" - integrity sha512-tJ1T0n6g4dXMsL45YsSzzSDZCxiHXAQp/qHrucOq5gEHncTA3xDxnd5+sZcoQp+N1ZbieAaB8r/VUCG0gqseOg== +"@babel/plugin-transform-parameters@^7.10.4": + version "7.10.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.10.5.tgz#59d339d58d0b1950435f4043e74e2510005e2c4a" + integrity sha512-xPHwUj5RdFV8l1wuYiu5S9fqWGM2DrYc24TMvUiRrPVm+SM3XeqU9BcokQX/kEUe+p2RBwy+yoiR1w/Blq6ubw== dependencies: - "@babel/helper-get-function-arity" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-get-function-arity" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-property-literals@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.10.1.tgz#cffc7315219230ed81dc53e4625bf86815b6050d" - integrity sha512-Kr6+mgag8auNrgEpbfIWzdXYOvqDHZOF0+Bx2xh4H2EDNwcbRb9lY6nkZg8oSjsX+DH9Ebxm9hOqtKW+gRDeNA== +"@babel/plugin-transform-property-literals@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.10.4.tgz#f6fe54b6590352298785b83edd815d214c42e3c0" + integrity sha512-ofsAcKiUxQ8TY4sScgsGeR2vJIsfrzqvFb9GvJ5UdXDzl+MyYCaBj/FGzXuv7qE0aJcjWMILny1epqelnFlz8g== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-react-display-name@^7.10.1": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.10.3.tgz#e3c246e1b4f3e52cc7633e237ad9194c0ec482e7" - integrity sha512-dOV44bnSW5KZ6kYF6xSHBth7TFiHHZReYXH/JH3XnFNV+soEL1F5d8JT7AJ3ZBncd19Qul7SN4YpBnyWOnQ8KA== +"@babel/plugin-transform-react-display-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.10.4.tgz#b5795f4e3e3140419c3611b7a2a3832b9aef328d" + integrity sha512-Zd4X54Mu9SBfPGnEcaGcOrVAYOtjT2on8QZkLKEq1S/tHexG39d9XXGZv19VfRrDjPJzFmPfTAqOQS1pfFOujw== dependencies: - 
"@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-react-jsx-development@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.10.1.tgz#1ac6300d8b28ef381ee48e6fec430cc38047b7f3" - integrity sha512-XwDy/FFoCfw9wGFtdn5Z+dHh6HXKHkC6DwKNWpN74VWinUagZfDcEJc3Y8Dn5B3WMVnAllX8Kviaw7MtC5Epwg== +"@babel/plugin-transform-react-jsx-development@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.10.4.tgz#6ec90f244394604623880e15ebc3c34c356258ba" + integrity sha512-RM3ZAd1sU1iQ7rI2dhrZRZGv0aqzNQMbkIUCS1txYpi9wHQ2ZHNjo5TwX+UD6pvFW4AbWqLVYvKy5qJSAyRGjQ== dependencies: - "@babel/helper-builder-react-jsx-experimental" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/plugin-syntax-jsx" "^7.10.1" + "@babel/helper-builder-react-jsx-experimental" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-jsx" "^7.10.4" -"@babel/plugin-transform-react-jsx-self@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.10.1.tgz#22143e14388d72eb88649606bb9e46f421bc3821" - integrity sha512-4p+RBw9d1qV4S749J42ZooeQaBomFPrSxa9JONLHJ1TxCBo3TzJ79vtmG2S2erUT8PDDrPdw4ZbXGr2/1+dILA== +"@babel/plugin-transform-react-jsx-self@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.10.4.tgz#cd301a5fed8988c182ed0b9d55e9bd6db0bd9369" + integrity sha512-yOvxY2pDiVJi0axdTWHSMi5T0DILN+H+SaeJeACHKjQLezEzhLx9nEF9xgpBLPtkZsks9cnb5P9iBEi21En3gg== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/plugin-syntax-jsx" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-jsx" "^7.10.4" 
-"@babel/plugin-transform-react-jsx-source@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.10.1.tgz#30db3d4ee3cdebbb26a82a9703673714777a4273" - integrity sha512-neAbaKkoiL+LXYbGDvh6PjPG+YeA67OsZlE78u50xbWh2L1/C81uHiNP5d1fw+uqUIoiNdCC8ZB+G4Zh3hShJA== +"@babel/plugin-transform-react-jsx-source@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.10.5.tgz#34f1779117520a779c054f2cdd9680435b9222b4" + integrity sha512-wTeqHVkN1lfPLubRiZH3o73f4rfon42HpgxUSs86Nc+8QIcm/B9s8NNVXu/gwGcOyd7yDib9ikxoDLxJP0UiDA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/plugin-syntax-jsx" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-jsx" "^7.10.4" -"@babel/plugin-transform-react-jsx@^7.10.1": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.10.3.tgz#c07ad86b7c159287c89b643f201f59536231048e" - integrity sha512-Y21E3rZmWICRJnvbGVmDLDZ8HfNDIwjGF3DXYHx1le0v0mIHCs0Gv5SavyW5Z/jgAHLaAoJPiwt+Dr7/zZKcOQ== +"@babel/plugin-transform-react-jsx@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.10.4.tgz#673c9f913948764a4421683b2bef2936968fddf2" + integrity sha512-L+MfRhWjX0eI7Js093MM6MacKU4M6dnCRa/QPDwYMxjljzSCzzlzKzj9Pk4P3OtrPcxr2N3znR419nr3Xw+65A== dependencies: - "@babel/helper-builder-react-jsx" "^7.10.3" - "@babel/helper-builder-react-jsx-experimental" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.3" - "@babel/plugin-syntax-jsx" "^7.10.1" + "@babel/helper-builder-react-jsx" "^7.10.4" + "@babel/helper-builder-react-jsx-experimental" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-jsx" "^7.10.4" -"@babel/plugin-transform-react-pure-annotations@^7.10.1": - version "7.10.3" - 
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.10.3.tgz#97840981673fcb0df2cc33fb25b56cc421f7deef" - integrity sha512-n/fWYGqvTl7OLZs/QcWaKMFdADPvC3V6jYuEOpPyvz97onsW9TXn196fHnHW1ZgkO20/rxLOgKnEtN1q9jkgqA== +"@babel/plugin-transform-react-pure-annotations@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.10.4.tgz#3eefbb73db94afbc075f097523e445354a1c6501" + integrity sha512-+njZkqcOuS8RaPakrnR9KvxjoG1ASJWpoIv/doyWngId88JoFlPlISenGXjrVacZUIALGUr6eodRs1vmPnF23A== dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-regenerator@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.10.3.tgz#6ec680f140a5ceefd291c221cb7131f6d7e8cb6d" - integrity sha512-H5kNeW0u8mbk0qa1jVIVTeJJL6/TJ81ltD4oyPx0P499DhMJrTmmIFCmJ3QloGpQG8K9symccB7S7SJpCKLwtw== +"@babel/plugin-transform-regenerator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.10.4.tgz#2015e59d839074e76838de2159db421966fd8b63" + integrity sha512-3thAHwtor39A7C04XucbMg17RcZ3Qppfxr22wYzZNcVIkPHfpM9J0SO8zuCV6SZa265kxBJSrfKTvDCYqBFXGw== dependencies: regenerator-transform "^0.14.2" -"@babel/plugin-transform-reserved-words@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.10.1.tgz#0fc1027312b4d1c3276a57890c8ae3bcc0b64a86" - integrity sha512-qN1OMoE2nuqSPmpTqEM7OvJ1FkMEV+BjVeZZm9V9mq/x1JLKQ4pcv8riZJMNN3u2AUGl0ouOMjRr2siecvHqUQ== +"@babel/plugin-transform-reserved-words@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.10.4.tgz#8f2682bcdcef9ed327e1b0861585d7013f8a54dd" + integrity sha512-hGsw1O6Rew1fkFbDImZIEqA8GoidwTAilwCyWqLBM9f+e/u/sQMQu7uX6dyokfOayRuuVfKOW4O7HvaBWM+JlQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-transform-runtime@^7.8.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.10.3.tgz#3b287b06acc534a7cb6e6c71d6b1d88b1922dd6c" - integrity sha512-b5OzMD1Hi8BBzgQdRHyVVaYrk9zG0wset1it2o3BgonkPadXfOv0aXRqd7864DeOIu3FGKP/h6lr15FE5mahVw== + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.10.5.tgz#3b39b7b24830e0c2d8ff7a4489fe5cf99fbace86" + integrity sha512-tV4V/FjElJ9lQtyjr5xD2IFFbgY46r7EeVu5a8CpEKT5laheHKSlFeHjpkPppW3PqzGLAuv5k2qZX5LgVZIX5w== dependencies: - "@babel/helper-module-imports" "^7.10.3" - "@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" resolve "^1.8.1" semver "^5.5.1" -"@babel/plugin-transform-shorthand-properties@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.10.1.tgz#e8b54f238a1ccbae482c4dce946180ae7b3143f3" - integrity sha512-AR0E/lZMfLstScFwztApGeyTHJ5u3JUKMjneqRItWeEqDdHWZwAOKycvQNCasCK/3r5YXsuNG25funcJDu7Y2g== +"@babel/plugin-transform-shorthand-properties@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.10.4.tgz#9fd25ec5cdd555bb7f473e5e6ee1c971eede4dd6" + integrity sha512-AC2K/t7o07KeTIxMoHneyX90v3zkm5cjHJEokrPEAGEy3UCp8sLKfnfOIGdZ194fyN4wfX/zZUWT9trJZ0qc+Q== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" 
-"@babel/plugin-transform-spread@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.10.1.tgz#0c6d618a0c4461a274418460a28c9ccf5239a7c8" - integrity sha512-8wTPym6edIrClW8FI2IoaePB91ETOtg36dOkj3bYcNe7aDMN2FXEoUa+WrmPc4xa1u2PQK46fUX2aCb+zo9rfw== +"@babel/plugin-transform-spread@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.10.4.tgz#4e2c85ea0d6abaee1b24dcfbbae426fe8d674cff" + integrity sha512-1e/51G/Ni+7uH5gktbWv+eCED9pP8ZpRhZB3jOaI3mmzfvJTWHkuyYTv0Z5PYtyM+Tr2Ccr9kUdQxn60fI5WuQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-sticky-regex@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.10.1.tgz#90fc89b7526228bed9842cff3588270a7a393b00" - integrity sha512-j17ojftKjrL7ufX8ajKvwRilwqTok4q+BjkknmQw9VNHnItTyMP5anPFzxFJdCQs7clLcWpCV3ma+6qZWLnGMA== +"@babel/plugin-transform-sticky-regex@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.10.4.tgz#8f3889ee8657581130a29d9cc91d7c73b7c4a28d" + integrity sha512-Ddy3QZfIbEV0VYcVtFDCjeE4xwVTJWTmUtorAJkn6u/92Z/nWJNV+mILyqHKrUxXYKA2EoCilgoPePymKL4DvQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/helper-regex" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-regex" "^7.10.4" -"@babel/plugin-transform-template-literals@^7.10.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.10.3.tgz#69d39b3d44b31e7b4864173322565894ce939b25" - integrity sha512-yaBn9OpxQra/bk0/CaA4wr41O0/Whkg6nqjqApcinxM7pro51ojhX6fv1pimAnVjVfDy14K0ULoRL70CA9jWWA== +"@babel/plugin-transform-template-literals@^7.10.4": + version 
"7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.10.5.tgz#78bc5d626a6642db3312d9d0f001f5e7639fde8c" + integrity sha512-V/lnPGIb+KT12OQikDvgSuesRX14ck5FfJXt6+tXhdkJ+Vsd0lDCVtF6jcB4rNClYFzaB2jusZ+lNISDk2mMMw== dependencies: - "@babel/helper-annotate-as-pure" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.3" + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-typeof-symbol@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.10.1.tgz#60c0239b69965d166b80a84de7315c1bc7e0bb0e" - integrity sha512-qX8KZcmbvA23zDi+lk9s6hC1FM7jgLHYIjuLgULgc8QtYnmB3tAVIYkNoKRQ75qWBeyzcoMoK8ZQmogGtC/w0g== +"@babel/plugin-transform-typeof-symbol@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.10.4.tgz#9509f1a7eec31c4edbffe137c16cc33ff0bc5bfc" + integrity sha512-QqNgYwuuW0y0H+kUE/GWSR45t/ccRhe14Fs/4ZRouNNQsyd4o3PG4OtHiIrepbM2WKUBDAXKCAK/Lk4VhzTaGA== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-typescript@^7.10.1": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.10.3.tgz#b3b35fb34ef0bd628b4b8329b0e5f985369201d4" - integrity sha512-qU9Lu7oQyh3PGMQncNjQm8RWkzw6LqsWZQlZPQMgrGt6s3YiBIaQ+3CQV/FA/icGS5XlSWZGwo/l8ErTyelS0Q== +"@babel/plugin-transform-typescript@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.10.5.tgz#edf353944e979f40d8ff9fe4e9975d0a465037c5" + integrity sha512-YCyYsFrrRMZ3qR7wRwtSSJovPG5vGyG4ZdcSAivGwTfoasMp3VOB/AKhohu3dFtmB4cCDcsndCSxGtrdliCsZQ== dependencies: - "@babel/helper-create-class-features-plugin" "^7.10.3" - 
"@babel/helper-plugin-utils" "^7.10.3" - "@babel/plugin-syntax-typescript" "^7.10.1" + "@babel/helper-create-class-features-plugin" "^7.10.5" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-typescript" "^7.10.4" -"@babel/plugin-transform-unicode-escapes@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.10.1.tgz#add0f8483dab60570d9e03cecef6c023aa8c9940" - integrity sha512-zZ0Poh/yy1d4jeDWpx/mNwbKJVwUYJX73q+gyh4bwtG0/iUlzdEu0sLMda8yuDFS6LBQlT/ST1SJAR6zYwXWgw== +"@babel/plugin-transform-unicode-escapes@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.10.4.tgz#feae523391c7651ddac115dae0a9d06857892007" + integrity sha512-y5XJ9waMti2J+e7ij20e+aH+fho7Wb7W8rNuu72aKRwCHFqQdhkdU2lo3uZ9tQuboEJcUFayXdARhcxLQ3+6Fg== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-transform-unicode-regex@^7.10.1": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.10.1.tgz#6b58f2aea7b68df37ac5025d9c88752443a6b43f" - integrity sha512-Y/2a2W299k0VIUdbqYm9X2qS6fE0CUBhhiPpimK6byy7OJ/kORLlIX+J6UrjgNu5awvs62k+6RSslxhcvVw2Tw== +"@babel/plugin-transform-unicode-regex@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.10.4.tgz#e56d71f9282fac6db09c82742055576d5e6d80a8" + integrity sha512-wNfsc4s8N2qnIwpO/WP2ZiSyjfpTamT2C9V9FDH/Ljub9zw6P3SjkXcFmc0RQUt96k2fmIvtla2MMjgTwIAC+A== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.1" - "@babel/helper-plugin-utils" "^7.10.1" + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" "@babel/preset-env@^7.7.6": - version "7.10.3" - resolved 
"https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.10.3.tgz#3e58c9861bbd93b6a679987c7e4bd365c56c80c9" - integrity sha512-jHaSUgiewTmly88bJtMHbOd1bJf2ocYxb5BWKSDQIP5tmgFuS/n0gl+nhSrYDhT33m0vPxp+rP8oYYgPgMNQlg== - dependencies: - "@babel/compat-data" "^7.10.3" - "@babel/helper-compilation-targets" "^7.10.2" - "@babel/helper-module-imports" "^7.10.3" - "@babel/helper-plugin-utils" "^7.10.3" - "@babel/plugin-proposal-async-generator-functions" "^7.10.3" - "@babel/plugin-proposal-class-properties" "^7.10.1" - "@babel/plugin-proposal-dynamic-import" "^7.10.1" - "@babel/plugin-proposal-json-strings" "^7.10.1" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.10.1" - "@babel/plugin-proposal-numeric-separator" "^7.10.1" - "@babel/plugin-proposal-object-rest-spread" "^7.10.3" - "@babel/plugin-proposal-optional-catch-binding" "^7.10.1" - "@babel/plugin-proposal-optional-chaining" "^7.10.3" - "@babel/plugin-proposal-private-methods" "^7.10.1" - "@babel/plugin-proposal-unicode-property-regex" "^7.10.1" + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.10.4.tgz#fbf57f9a803afd97f4f32e4f798bb62e4b2bef5f" + integrity sha512-tcmuQ6vupfMZPrLrc38d0sF2OjLT3/bZ0dry5HchNCQbrokoQi4reXqclvkkAT5b+gWc23meVWpve5P/7+w/zw== + dependencies: + "@babel/compat-data" "^7.10.4" + "@babel/helper-compilation-targets" "^7.10.4" + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-proposal-async-generator-functions" "^7.10.4" + "@babel/plugin-proposal-class-properties" "^7.10.4" + "@babel/plugin-proposal-dynamic-import" "^7.10.4" + "@babel/plugin-proposal-json-strings" "^7.10.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.10.4" + "@babel/plugin-proposal-numeric-separator" "^7.10.4" + "@babel/plugin-proposal-object-rest-spread" "^7.10.4" + "@babel/plugin-proposal-optional-catch-binding" "^7.10.4" + "@babel/plugin-proposal-optional-chaining" "^7.10.4" + 
"@babel/plugin-proposal-private-methods" "^7.10.4" + "@babel/plugin-proposal-unicode-property-regex" "^7.10.4" "@babel/plugin-syntax-async-generators" "^7.8.0" - "@babel/plugin-syntax-class-properties" "^7.10.1" + "@babel/plugin-syntax-class-properties" "^7.10.4" "@babel/plugin-syntax-dynamic-import" "^7.8.0" "@babel/plugin-syntax-json-strings" "^7.8.0" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" - "@babel/plugin-syntax-numeric-separator" "^7.10.1" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" "@babel/plugin-syntax-object-rest-spread" "^7.8.0" "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" "@babel/plugin-syntax-optional-chaining" "^7.8.0" - "@babel/plugin-syntax-top-level-await" "^7.10.1" - "@babel/plugin-transform-arrow-functions" "^7.10.1" - "@babel/plugin-transform-async-to-generator" "^7.10.1" - "@babel/plugin-transform-block-scoped-functions" "^7.10.1" - "@babel/plugin-transform-block-scoping" "^7.10.1" - "@babel/plugin-transform-classes" "^7.10.3" - "@babel/plugin-transform-computed-properties" "^7.10.3" - "@babel/plugin-transform-destructuring" "^7.10.1" - "@babel/plugin-transform-dotall-regex" "^7.10.1" - "@babel/plugin-transform-duplicate-keys" "^7.10.1" - "@babel/plugin-transform-exponentiation-operator" "^7.10.1" - "@babel/plugin-transform-for-of" "^7.10.1" - "@babel/plugin-transform-function-name" "^7.10.1" - "@babel/plugin-transform-literals" "^7.10.1" - "@babel/plugin-transform-member-expression-literals" "^7.10.1" - "@babel/plugin-transform-modules-amd" "^7.10.1" - "@babel/plugin-transform-modules-commonjs" "^7.10.1" - "@babel/plugin-transform-modules-systemjs" "^7.10.3" - "@babel/plugin-transform-modules-umd" "^7.10.1" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.10.3" - "@babel/plugin-transform-new-target" "^7.10.1" - "@babel/plugin-transform-object-super" "^7.10.1" - "@babel/plugin-transform-parameters" "^7.10.1" - "@babel/plugin-transform-property-literals" "^7.10.1" - 
"@babel/plugin-transform-regenerator" "^7.10.3" - "@babel/plugin-transform-reserved-words" "^7.10.1" - "@babel/plugin-transform-shorthand-properties" "^7.10.1" - "@babel/plugin-transform-spread" "^7.10.1" - "@babel/plugin-transform-sticky-regex" "^7.10.1" - "@babel/plugin-transform-template-literals" "^7.10.3" - "@babel/plugin-transform-typeof-symbol" "^7.10.1" - "@babel/plugin-transform-unicode-escapes" "^7.10.1" - "@babel/plugin-transform-unicode-regex" "^7.10.1" + "@babel/plugin-syntax-top-level-await" "^7.10.4" + "@babel/plugin-transform-arrow-functions" "^7.10.4" + "@babel/plugin-transform-async-to-generator" "^7.10.4" + "@babel/plugin-transform-block-scoped-functions" "^7.10.4" + "@babel/plugin-transform-block-scoping" "^7.10.4" + "@babel/plugin-transform-classes" "^7.10.4" + "@babel/plugin-transform-computed-properties" "^7.10.4" + "@babel/plugin-transform-destructuring" "^7.10.4" + "@babel/plugin-transform-dotall-regex" "^7.10.4" + "@babel/plugin-transform-duplicate-keys" "^7.10.4" + "@babel/plugin-transform-exponentiation-operator" "^7.10.4" + "@babel/plugin-transform-for-of" "^7.10.4" + "@babel/plugin-transform-function-name" "^7.10.4" + "@babel/plugin-transform-literals" "^7.10.4" + "@babel/plugin-transform-member-expression-literals" "^7.10.4" + "@babel/plugin-transform-modules-amd" "^7.10.4" + "@babel/plugin-transform-modules-commonjs" "^7.10.4" + "@babel/plugin-transform-modules-systemjs" "^7.10.4" + "@babel/plugin-transform-modules-umd" "^7.10.4" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.10.4" + "@babel/plugin-transform-new-target" "^7.10.4" + "@babel/plugin-transform-object-super" "^7.10.4" + "@babel/plugin-transform-parameters" "^7.10.4" + "@babel/plugin-transform-property-literals" "^7.10.4" + "@babel/plugin-transform-regenerator" "^7.10.4" + "@babel/plugin-transform-reserved-words" "^7.10.4" + "@babel/plugin-transform-shorthand-properties" "^7.10.4" + "@babel/plugin-transform-spread" "^7.10.4" + 
"@babel/plugin-transform-sticky-regex" "^7.10.4" + "@babel/plugin-transform-template-literals" "^7.10.4" + "@babel/plugin-transform-typeof-symbol" "^7.10.4" + "@babel/plugin-transform-unicode-escapes" "^7.10.4" + "@babel/plugin-transform-unicode-regex" "^7.10.4" "@babel/preset-modules" "^0.1.3" - "@babel/types" "^7.10.3" + "@babel/types" "^7.10.4" browserslist "^4.12.0" core-js-compat "^3.6.2" invariant "^2.2.2" @@ -871,72 +869,72 @@ esutils "^2.0.2" "@babel/preset-react@^7.7.4": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.10.1.tgz#e2ab8ae9a363ec307b936589f07ed753192de041" - integrity sha512-Rw0SxQ7VKhObmFjD/cUcKhPTtzpeviEFX1E6PgP+cYOhQ98icNqtINNFANlsdbQHrmeWnqdxA4Tmnl1jy5tp3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/plugin-transform-react-display-name" "^7.10.1" - "@babel/plugin-transform-react-jsx" "^7.10.1" - "@babel/plugin-transform-react-jsx-development" "^7.10.1" - "@babel/plugin-transform-react-jsx-self" "^7.10.1" - "@babel/plugin-transform-react-jsx-source" "^7.10.1" - "@babel/plugin-transform-react-pure-annotations" "^7.10.1" + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.10.4.tgz#92e8a66d816f9911d11d4cc935be67adfc82dbcf" + integrity sha512-BrHp4TgOIy4M19JAfO1LhycVXOPWdDbTRep7eVyatf174Hff+6Uk53sDyajqZPu8W1qXRBiYOfIamek6jA7YVw== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-transform-react-display-name" "^7.10.4" + "@babel/plugin-transform-react-jsx" "^7.10.4" + "@babel/plugin-transform-react-jsx-development" "^7.10.4" + "@babel/plugin-transform-react-jsx-self" "^7.10.4" + "@babel/plugin-transform-react-jsx-source" "^7.10.4" + "@babel/plugin-transform-react-pure-annotations" "^7.10.4" "@babel/preset-typescript@^7.3.3": - version "7.10.1" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.10.1.tgz#a8d8d9035f55b7d99a2461a0bdc506582914d07e" - integrity 
sha512-m6GV3y1ShiqxnyQj10600ZVOFrSSAa8HQ3qIUk2r+gcGtHTIRw0dJnFLt1WNXpKjtVw7yw1DAPU/6ma2ZvgJuA== + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.10.4.tgz#7d5d052e52a682480d6e2cc5aa31be61c8c25e36" + integrity sha512-SdYnvGPv+bLlwkF2VkJnaX/ni1sMNetcGI1+nThF1gyv6Ph8Qucc4ZZAjM5yZcE/AKRXIOTZz7eSRDWOEjPyRQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.1" - "@babel/plugin-transform-typescript" "^7.10.1" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-transform-typescript" "^7.10.4" -"@babel/runtime-corejs3@^7.10.2", "@babel/runtime-corejs3@^7.8.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.10.3.tgz#931ed6941d3954924a7aa967ee440e60c507b91a" - integrity sha512-HA7RPj5xvJxQl429r5Cxr2trJwOfPjKiqhCXcdQPSqO2G0RHPZpXu4fkYmBaTKCp2c/jRaMK9GB/lN+7zvvFPw== +"@babel/runtime-corejs3@^7.10.2": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.10.5.tgz#a57fe6c13045ca33768a2aa527ead795146febe1" + integrity sha512-RMafpmrNB5E/bwdSphLr8a8++9TosnyJp98RZzI6VOx2R2CCMpsXXXRvmI700O9oEKpXdZat6oEK68/F0zjd4A== dependencies: core-js-pure "^3.0.0" regenerator-runtime "^0.13.4" "@babel/runtime@^7.0.0", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.3.tgz#670d002655a7c366540c67f6fd3342cd09500364" - integrity sha512-RzGO0RLSdokm9Ipe/YD+7ww8X2Ro79qiXZF3HU9ljrM+qnJmH1Vqth+hbiQZy761LnMJTMitHDuKVYTk3k4dLw== + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.5.tgz#303d8bd440ecd5a491eae6117fd3367698674c5c" + integrity sha512-otddXKhdNn7d0ptoFRHtMLa8LqDxLYwTjB4nYgM1yy5N6gU/MUf8zqyyLltCH3yAVitBzmwK4us+DD0l/MauAg== dependencies: regenerator-runtime "^0.13.4" -"@babel/template@^7.10.1", 
"@babel/template@^7.10.3", "@babel/template@^7.3.3": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.3.tgz#4d13bc8e30bf95b0ce9d175d30306f42a2c9a7b8" - integrity sha512-5BjI4gdtD+9fHZUsaxPHPNpwa+xRkDO7c7JbhYn2afvrkDu5SfAAbi9AIMXw2xEhO/BR35TqiW97IqNvCo/GqA== - dependencies: - "@babel/code-frame" "^7.10.3" - "@babel/parser" "^7.10.3" - "@babel/types" "^7.10.3" - -"@babel/traverse@^7.1.0", "@babel/traverse@^7.10.1", "@babel/traverse@^7.10.3", "@babel/traverse@^7.7.0": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.10.3.tgz#0b01731794aa7b77b214bcd96661f18281155d7e" - integrity sha512-qO6623eBFhuPm0TmmrUFMT1FulCmsSeJuVGhiLodk2raUDFhhTECLd9E9jC4LBIWziqt4wgF6KuXE4d+Jz9yug== - dependencies: - "@babel/code-frame" "^7.10.3" - "@babel/generator" "^7.10.3" - "@babel/helper-function-name" "^7.10.3" - "@babel/helper-split-export-declaration" "^7.10.1" - "@babel/parser" "^7.10.3" - "@babel/types" "^7.10.3" +"@babel/template@^7.10.4", "@babel/template@^7.3.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.4.tgz#3251996c4200ebc71d1a8fc405fba940f36ba278" + integrity sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/parser" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/traverse@^7.1.0", "@babel/traverse@^7.10.4", "@babel/traverse@^7.10.5", "@babel/traverse@^7.7.0": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.10.5.tgz#77ce464f5b258be265af618d8fddf0536f20b564" + integrity sha512-yc/fyv2gUjPqzTz0WHeRJH2pv7jA9kA7mBX2tXl/x5iOE81uaVPuGPtaYk7wmkx4b67mQ7NqI8rmT2pF47KYKQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.10.5" + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" + "@babel/parser" "^7.10.5" + "@babel/types" "^7.10.5" debug "^4.1.0" 
globals "^11.1.0" - lodash "^4.17.13" + lodash "^4.17.19" -"@babel/types@^7.0.0", "@babel/types@^7.10.1", "@babel/types@^7.10.3", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4", "@babel/types@^7.7.0": - version "7.10.3" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.10.3.tgz#6535e3b79fea86a6b09e012ea8528f935099de8e" - integrity sha512-nZxaJhBXBQ8HVoIcGsf9qWep3Oh3jCENK54V4mRF7qaJabVsAYdbTtmSD8WmAp1R6ytPiu5apMwSXyxB1WlaBA== +"@babel/types@^7.0.0", "@babel/types@^7.10.4", "@babel/types@^7.10.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4", "@babel/types@^7.7.0": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.10.5.tgz#d88ae7e2fde86bfbfe851d4d81afa70a997b5d15" + integrity sha512-ixV66KWfCI6GKoA/2H9v6bQdbfXEwwpOdQ8cRvb4F+eyvhlaHxWFMQB4+3d9QFJXZsiiiqVrewNV0DFEQpyT4Q== dependencies: - "@babel/helper-validator-identifier" "^7.10.3" - lodash "^4.17.13" + "@babel/helper-validator-identifier" "^7.10.4" + lodash "^4.17.19" to-fast-properties "^2.0.0" "@bcoe/v8-coverage@^0.2.3": @@ -1250,32 +1248,33 @@ fastq "^1.6.0" "@sinonjs/commons@^1.7.0": - version "1.8.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.0.tgz#c8d68821a854c555bba172f3b06959a0039b236d" - integrity sha512-wEj54PfsZ5jGSwMX68G8ZXFawcSglQSXqCftWX3ec8MDUzQdHgcKvw97awHbY0efQEL5iKUOAmmVtoYgmrSG4Q== + version "1.8.1" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.1.tgz#e7df00f98a203324f6dc7cc606cad9d4a8ab2217" + integrity sha512-892K+kWUUi3cl+LlqEWIDrhvLgdL79tECi8JZUyq6IviKy/DNhuzCRlbHUjxK89f4ypPMMaFnFuR9Ie6DoIMsw== dependencies: type-detect "4.0.8" "@testing-library/dom@^7.17.1": - version "7.18.1" - resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.18.1.tgz#c49530410fb184522b3b59c4f9cd6397dc5b462d" - integrity sha512-tGq4KAFjaI7j375sMM1RRVleWA0viJWs/w69B+nyDkqYLNkhdTHdV6mGkspJlkn3PUfyBDi3rERDv4PA/LrpVA== + version "7.21.1" + resolved 
"https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.21.1.tgz#c59f50fddc33db34547a7860f969dbede110623d" + integrity sha512-BVFZeCtZ4cbFqOr/T8rS8q8tfK998SZeC0VcBUGBp3uEr2NVjPaImnzHPJWUx3A+JQqT01aG60SZ7kuyuZCZUQ== dependencies: "@babel/runtime" "^7.10.3" + "@types/aria-query" "^4.2.0" aria-query "^4.2.2" - dom-accessibility-api "^0.4.5" + dom-accessibility-api "^0.4.6" pretty-format "^25.5.0" "@testing-library/jest-dom@^5.5.0": - version "5.11.0" - resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.11.0.tgz#1439f08dc85ce7c6d3bbad0ee5d53b2206f55768" - integrity sha512-mhaCySy7dZlyfcxcYy+0jLllODHEiHkVdmwQ00wD0HrWiSx0fSVHz/0WmdlRkvhfSOuqsRsBUreXOtBvruWGQA== + version "5.11.1" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.11.1.tgz#b9541d7625cec9e5feb647f49a96c43f7c055cdd" + integrity sha512-NHOHjDwyBoqM7mXjNLieSp/6vJ17DILzhNTw7+RarluaBkyWRzWgFj+d6xnd1adMBlwfQSeR2FWGTxHXCxeMSA== dependencies: "@babel/runtime" "^7.9.2" "@types/testing-library__jest-dom" "^5.9.1" aria-query "^4.2.2" chalk "^3.0.0" - css "^2.2.4" + css "^3.0.0" css.escape "^1.5.1" jest-diff "^25.1.0" jest-matcher-utils "^25.1.0" @@ -1283,9 +1282,9 @@ redent "^3.0.0" "@testing-library/react@^10.0.3": - version "10.4.3" - resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-10.4.3.tgz#c6f356688cffc51f6b35385583d664bb11a161f4" - integrity sha512-A/ydYXcwAcfY7vkPrfUkUTf9HQLL3/GtixTefcu3OyGQtAYQ7XBQj1S9FWbLEhfWa0BLwFwTBFS3Ao1O0tbMJg== + version "10.4.7" + resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-10.4.7.tgz#fc14847fb70a5e93576b8f7f0d1490ead02a9061" + integrity sha512-hUYbum3X2f1ZKusKfPaooKNYqE/GtPiQ+D2HJaJ4pkxeNJQFVUEvAvEh9+3QuLdBeTWkDMNY5NSijc5+pGdM4Q== dependencies: "@babel/runtime" "^7.10.3" "@testing-library/dom" "^7.17.1" @@ -1300,6 +1299,11 @@ resolved "https://registry.yarnpkg.com/@types/angular/-/angular-1.6.50.tgz#8b6599088d80f68ef0cad7d3a2062248ebe72b3d" integrity 
sha512-D3KB0PdaxdwtA44yOpK+NtptTscKWgUzXmf8fiLaaVxnX+b7QQ+dUMsyeVDweCQ6VX4PMwkd6x2hJ0X+ISIsoQ== +"@types/aria-query@^4.2.0": + version "4.2.0" + resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.0.tgz#14264692a9d6e2fa4db3df5e56e94b5e25647ac0" + integrity sha512-iIgQNzCm0v7QMhhe4Jjn9uRh+I6GoPmt03CbEtwx3ao8/EfoQcmgtqH4vQ5Db/lxiIGaWDv6nwvunuh0RyX0+A== + "@types/babel__core@^7.1.7": version "7.1.9" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.9.tgz#77e59d438522a6fb898fa43dc3455c6e72f3963d" @@ -1327,9 +1331,9 @@ "@babel/types" "^7.0.0" "@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": - version "7.0.12" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.0.12.tgz#22f49a028e69465390f87bb103ebd61bd086b8f5" - integrity sha512-t4CoEokHTfcyfb4hUaF9oOHu9RmmNWnm1CP0YmMqOOfClKascOmvlEM736vlqeScuGvBDsHkf8R2INd4DWreQA== + version "7.0.13" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.0.13.tgz#1874914be974a492e1b4cb00585cabb274e8ba18" + integrity sha512-i+zS7t6/s9cdQvbqKDARrcbrPvtJGlbYsMkazo03nTAK3RX9FNrLllXys22uiTGJapPOTZTQ35nHh4ISph4SLQ== dependencies: "@babel/types" "^7.3.0" @@ -1344,9 +1348,9 @@ integrity sha512-qS0VHlL6eBUUoUeBnI/ASCffoniS62zdV6IUtLSIjGKmRhZNawotaOMsTYivZOTZVktfe9koAJkD9XFac7tEEg== "@types/cheerio@*": - version "0.22.18" - resolved "https://registry.yarnpkg.com/@types/cheerio/-/cheerio-0.22.18.tgz#19018dceae691509901e339d63edf1e935978fe6" - integrity sha512-Fq7R3fINAPSdUEhOyjG4iVxgHrOnqDJbY0/BUuiN0pvD/rfmZWekVZnv+vcs8TtpA2XF50uv50LaE4EnpEL/Hw== + version "0.22.21" + resolved "https://registry.yarnpkg.com/@types/cheerio/-/cheerio-0.22.21.tgz#5e37887de309ba11b2e19a6e14cad7874b31a8a3" + integrity sha512-aGI3DfswwqgKPiEOTaiHV2ZPC9KEhprpgEbJnv0fZl3SGX0cGgEva1126dGrMC6AJM6v/aihlUgJn9M5DbDZ/Q== dependencies: "@types/node" "*" @@ -1369,9 +1373,9 @@ "@types/react" "*" "@types/glob@^7.1.1": - version "7.1.2" - resolved 
"https://registry.yarnpkg.com/@types/glob/-/glob-7.1.2.tgz#06ca26521353a545d94a0adc74f38a59d232c987" - integrity sha512-VgNIkxK+j7Nz5P7jvUZlRvhuPSmsEfS03b0alKcq5V/STUKAa3Plemsn5mrQUO7am6OErJ4rhGEGJbACclrtRA== + version "7.1.3" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.3.tgz#e6ba80f36b7daad2c685acd9266382e68985c183" + integrity sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w== dependencies: "@types/minimatch" "*" "@types/node" "*" @@ -1487,9 +1491,9 @@ integrity sha512-e9wgeY6gaY21on3ve0xAjgBVjGDWq/xUteK0ujsE53bUoxycMkqfnkUgMt6ffZtykZ5X12Mg3T7Pw4TRCObDKg== "@types/node@^13.13.4": - version "13.13.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.12.tgz#9c72e865380a7dc99999ea0ef20fc9635b503d20" - integrity sha512-zWz/8NEPxoXNT9YyF2osqyA9WjssZukYpgI4UYZpOjcyqwIUqWGkcCionaEb9Ki+FULyPyvNFpg/329Kd2/pbw== + version "13.13.14" + resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.14.tgz#20cd7d2a98f0c3b08d379f4ea9e6b315d2019529" + integrity sha512-Az3QsOt1U/K1pbCQ0TXGELTuTkPLOiFIQf3ILzbOyo0FqgV9SxRnxbxM5QlAveERZMHpZY+7u3Jz2tKyl+yg6g== "@types/normalize-package-data@^2.4.0": version "2.4.0" @@ -1584,9 +1588,9 @@ csstype "^2.2.0" "@types/react@^16.3.14": - version "16.9.41" - resolved "https://registry.yarnpkg.com/@types/react/-/react-16.9.41.tgz#925137ee4d2ff406a0ecf29e8e9237390844002e" - integrity sha512-6cFei7F7L4wwuM+IND/Q2cV1koQUvJ8iSV+Gwn0c3kvABZ691g7sp3hfEQHOUBJtccl1gPi+EyNjMIl9nGA0ug== + version "16.9.43" + resolved "https://registry.yarnpkg.com/@types/react/-/react-16.9.43.tgz#c287f23f6189666ee3bebc2eb8d0f84bcb6cdb6b" + integrity sha512-PxshAFcnJqIWYpJbLPriClH53Z2WlJcVZE+NP2etUtWQs2s7yIMj3/LDKZT/5CHJ/F62iyjVCDu2H3jHEXIxSg== dependencies: "@types/prop-types" "*" csstype "^2.2.0" @@ -1859,14 +1863,14 @@ ajv-errors@^1.0.0: integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: - 
version "3.5.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.0.tgz#5c894537098785926d71e696114a53ce768ed773" - integrity sha512-eyoaac3btgU8eJlvh01En8OCKzRqlLe2G5jDsCr3RiE2uLGMEEB1aaGwVVpwR8M95956tGH6R+9edC++OvzaVw== + version "3.5.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.1.tgz#b83ca89c5d42d69031f424cad49aada0236c6957" + integrity sha512-KWcq3xN8fDjSB+IMoh2VaXVhRI0BBGxoYp3rx7Pkb6z0cFjYR9Q9l4yZqqals0/zsioCmocC5H6UvsGD4MoIBA== ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.5.5: - version "6.12.2" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.2.tgz#c629c5eced17baf314437918d2da88c99d5958cd" - integrity sha512-k+V+hzjm5q/Mr8ef/1Y9goCmlsK4I6Sm74teeyGvFk1XrOsbsKLjEdrvny42CZ+a8sXbk8KWpY/bDwS+FLL2UQ== + version "6.12.3" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.3.tgz#18c5af38a111ddeb4f2697bd78d68abc1cabd706" + integrity sha512-4K0cK3L1hsqk9xIb2z9vs/XU+PGJZ9PNpJRDS9YLzmNdX6jmVPfamLvTJr0aDAusnHyCHO6MjzlkAsgtqp9teA== dependencies: fast-deep-equal "^3.1.1" fast-json-stable-stringify "^2.0.0" @@ -1885,10 +1889,10 @@ ansi-colors@^1.0.1: dependencies: ansi-wrap "^0.1.0" -ansi-colors@^3.2.1: - version "3.2.4" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" - integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== +ansi-colors@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== ansi-escapes@^1.1.0: version "1.4.0" @@ -2077,6 +2081,15 @@ array.prototype.flat@^1.2.3: define-properties "^1.1.3" es-abstract "^1.17.0-next.1" +array.prototype.flatmap@^1.2.3: + version "1.2.3" + resolved 
"https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.3.tgz#1c13f84a178566042dd63de4414440db9222e443" + integrity sha512-OOEk+lkePcg+ODXIpvuU9PAryCikCJyo7GlDG1upleEpQRx6mzL9puEBkozQ5iAx20KV0l3DbyQwqciJtqe5Pg== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.0-next.1" + function-bind "^1.1.1" + arrify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" @@ -2303,9 +2316,9 @@ binary-extensions@^1.0.0: integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binary-extensions@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c" - integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow== + version "2.1.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9" + integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ== bindings@^1.5.0: version "1.5.0" @@ -2451,12 +2464,12 @@ browserify-zlib@^0.2.0: pako "~1.0.5" browserslist@^4.12.0, browserslist@^4.8.5: - version "4.12.1" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.12.1.tgz#6d08bef149b70d153930780ba762644e0f329122" - integrity sha512-WMjXwFtPskSW1pQUDJRxvRKRkeCr7usN0O/Za76N+F4oadaTdQHotSGcX9jT/Hs7mSKPkyMFNvqawB/1HzYDKQ== + version "4.13.0" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.13.0.tgz#42556cba011e1b0a2775b611cba6a8eca18e940d" + integrity sha512-MINatJ5ZNrLnQ6blGvePd/QOz9Xtu+Ne+x29iQSCHfkU5BugKVJwZKn/iiL8UbpIpa3JhviKjz+XxMo0m2caFQ== dependencies: - caniuse-lite "^1.0.30001088" - electron-to-chromium "^1.3.481" + caniuse-lite "^1.0.30001093" + electron-to-chromium "^1.3.488" escalade "^3.0.1" node-releases "^1.1.58" @@ 
-2577,10 +2590,10 @@ camelcase@^5.0.0, camelcase@^5.3.1: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -caniuse-lite@^1.0.30001088: - version "1.0.30001088" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001088.tgz#23a6b9e192106107458528858f2c0e0dba0d9073" - integrity sha512-6eYUrlShRYveyqKG58HcyOfPgh3zb2xqs7NvT2VVtP3hEUeeWvc3lqhpeMTxYWBBeeaT9A4bKsrtjATm66BTHg== +caniuse-lite@^1.0.30001093: + version "1.0.30001104" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001104.tgz#4e3d5b3b1dd3c3529f10cb7f519c62ba3e579f5d" + integrity sha512-pkpCg7dmI/a7WcqM2yfdOiT4Xx5tzyoHAXWsX5/HxZ3TemwDZs0QXdqbE0UPLPVy/7BeK7693YfzfRYfu1YVpg== capture-exit@^2.0.0: version "2.0.0" @@ -2622,7 +2635,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0: +chalk@^4.0.0, chalk@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== @@ -2655,9 +2668,9 @@ chokidar@^2.1.8: fsevents "^1.2.7" chokidar@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.0.tgz#b30611423ce376357c765b9b8f904b9fba3c0be8" - integrity sha512-aXAaho2VJtisB/1fg1+3nlLJqGOuewTzQpd/Tz0yTg2R0e4IGtshYvtjowyEumcBv2z+y4+kc75Mz7j5xJskcQ== + version "3.4.1" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.1.tgz#e905bdecf10eaa0a0b1db0c664481cc4cbc22ba1" + integrity sha512-TQTJyr2stihpC4Sya9hs2Xh+O2wf+igjL36Y75xx2WdHuiICcn/XJza46Jwt0eT5hVpQOzo3FpY3cj3RVYLX0g== dependencies: anymatch "~3.1.1" braces "~3.0.2" @@ -2748,6 +2761,11 @@ cli-width@^2.0.0: resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" 
integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw== +cli-width@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" + integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== + cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" @@ -3067,15 +3085,14 @@ css.escape@^1.5.1: resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= -css@^2.2.4: - version "2.2.4" - resolved "https://registry.yarnpkg.com/css/-/css-2.2.4.tgz#c646755c73971f2bba6a601e2cf2fd71b1298929" - integrity sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw== +css@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" + integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== dependencies: - inherits "^2.0.3" + inherits "^2.0.4" source-map "^0.6.1" - source-map-resolve "^0.5.2" - urix "^0.1.0" + source-map-resolve "^0.6.0" cssom@^0.4.1: version "0.4.4" @@ -3095,9 +3112,9 @@ cssstyle@^2.0.0: cssom "~0.3.6" csstype@^2.2.0, csstype@^2.6.7: - version "2.6.10" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.10.tgz#e63af50e66d7c266edb6b32909cfd0aabe03928b" - integrity sha512-D34BqZU4cIlMCY93rZHbrq9pjTAQJ3U8S8rfBqjwHxkGPThWFjzZDQpgMJY0QViLxth6ZKYiwFBo14RdN44U/w== + version "2.6.11" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.11.tgz#452f4d024149ecf260a852b025e36562a253ffc5" + integrity sha512-l8YyEC9NBkSm783PFTvh0FmJy7s5pFKrDp49ZL7zBGX3fWkO+N4EEyan1qqp8cwPLDcD0OSdyY6hAMoxp34JFw== currently-unhandled@^0.4.1: version "0.4.1" @@ -3310,10 +3327,10 
@@ doctrine@^3.0.0: dependencies: esutils "^2.0.2" -dom-accessibility-api@^0.4.5: - version "0.4.5" - resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.4.5.tgz#d9c1cefa89f509d8cf132ab5d250004d755e76e3" - integrity sha512-HcPDilI95nKztbVikaN2vzwvmv0sE8Y2ZJFODy/m15n7mGXLeOKGiys9qWVbFbh+aq/KYj2lqMLybBOkYAEXqg== +dom-accessibility-api@^0.4.6: + version "0.4.6" + resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.4.6.tgz#f3f2af68aee01b1c862f37918d41841bb1aaf92a" + integrity sha512-qxFVFR/ymtfamEQT/AsYLe048sitxFCoCHiM+vuOdR3fE94i3so2SCFJxyz/RxV69PZ+9FgToYWOd7eqJqcbYw== dom-helpers@^5.0.0: version "5.1.4" @@ -3353,10 +3370,10 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" safer-buffer "^2.1.0" -electron-to-chromium@^1.3.481: - version "1.3.483" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.483.tgz#9269e7cfc1c8e72709824da171cbe47ca5e3ca9e" - integrity sha512-+05RF8S9rk8S0G8eBCqBRBaRq7+UN3lDs2DAvnG8SBSgQO3hjy0+qt4CmRk5eiuGbTcaicgXfPmBi31a+BD3lg== +electron-to-chromium@^1.3.488: + version "1.3.501" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.501.tgz#faa17a2cb0105ee30d5e1ca87eae7d8e85dd3175" + integrity sha512-tyzuKaV2POw2mtqBBzQGNBojMZzH0MRu8bT8T/50x+hWeucyG/9pkgAATy+PcM2ySNM9+8eG2VllY9c6j4i+bg== elliptic@^6.0.0, elliptic@^6.5.2: version "6.5.3" @@ -3392,11 +3409,11 @@ emojis-list@^3.0.0: integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== encoding@^0.1.11: - version "0.1.12" - resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.12.tgz#538b66f3ee62cd1ab51ec323829d1f9480c74beb" - integrity sha1-U4tm8+5izRq1HsMjgp0flIDHS+s= + version "0.1.13" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" + integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== 
dependencies: - iconv-lite "~0.4.13" + iconv-lite "^0.6.2" end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" @@ -3406,9 +3423,9 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0: once "^1.4.0" enhanced-resolve@^4.0.0, enhanced-resolve@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.2.0.tgz#5d43bda4a0fd447cb0ebbe71bef8deff8805ad0d" - integrity sha512-S7eiFb/erugyd1rLb6mQ3Vuq+EXHv5cpCkNqqIkYkBgN2QdFnyCZzFBleqwGEx4lgNGYij81BWnCrFNK7vxvjQ== + version "4.3.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.3.0.tgz#3b806f3bfafc1ec7de69551ef93cca46c1704126" + integrity sha512-3e87LvavsdxyoCfGusJnrZ5G8SLPOFeHSNpZI/ATL9a5leXo2k0w6MKnbqhdBad9qTobSfB20Ld7UmgoNbAZkQ== dependencies: graceful-fs "^4.1.2" memory-fs "^0.5.0" @@ -3424,11 +3441,11 @@ enhanced-resolve@~0.9.0: tapable "^0.1.8" enquirer@^2.3.5: - version "2.3.5" - resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.5.tgz#3ab2b838df0a9d8ab9e7dff235b0e8712ef92381" - integrity sha512-BNT1C08P9XD0vNg3J475yIUG+mVdp9T6towYFHUv897X0KoHBjB1shyrNmhmtHWKP17iSWgo7Gqh7BBuzLZMSA== + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== dependencies: - ansi-colors "^3.2.1" + ansi-colors "^4.1.1" errno@^0.1.3, errno@~0.1.7: version "0.1.7" @@ -3471,9 +3488,9 @@ es-to-primitive@^1.2.1: is-symbol "^1.0.2" escalade@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.0.1.tgz#52568a77443f6927cd0ab9c73129137533c965ed" - integrity sha512-DR6NO3h9niOT+MZs7bjxlj2a1k+POu5RN8CLTPX2+i78bRi9eLe7+0zXgUHMnGXWybYcL61E9hGhPKqedy8tQA== + version "3.0.2" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.0.2.tgz#6a580d70edb87880f22b4c91d0d56078df6962c4" + integrity 
sha512-gPYAU37hYCUhW5euPeR+Y74F7BL+IBsV93j5cvGriSaD1aG6MGsqsV1yamRdrWrb2j3aiZvb0X+UBOWpx3JWtQ== escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" @@ -3533,16 +3550,16 @@ eslint-module-utils@^2.6.0: pkg-dir "^2.0.0" eslint-plugin-babel@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-babel/-/eslint-plugin-babel-5.3.0.tgz#2e7f251ccc249326da760c1a4c948a91c32d0023" - integrity sha512-HPuNzSPE75O+SnxHIafbW5QB45r2w78fxqwK3HmjqIUoPfPzVrq6rD+CINU3yzoDSzEhUkX07VUphbF73Lth/w== + version "5.3.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-babel/-/eslint-plugin-babel-5.3.1.tgz#75a2413ffbf17e7be57458301c60291f2cfbf560" + integrity sha512-VsQEr6NH3dj664+EyxJwO4FCYm/00JhYb3Sk3ft8o+fpKuIfQ9TaW6uVUfvwMXHcf/lsnRIoyFPsLMyiWCSL/g== dependencies: eslint-rule-composer "^0.3.0" eslint-plugin-import@^2.14.0: - version "2.21.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.21.2.tgz#8fef77475cc5510801bedc95f84b932f7f334a7c" - integrity sha512-FEmxeGI6yaz+SnEB6YgNHlQK1Bs2DKLM+YF+vuTk5H8J9CLbJLtlPvRFgZZ2+sXiKAlN5dpdlrWOjK8ZoZJpQA== + version "2.22.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz#92f7736fe1fde3e2de77623c838dd992ff5ffb7e" + integrity sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg== dependencies: array-includes "^3.1.1" array.prototype.flat "^1.2.3" @@ -3559,9 +3576,9 @@ eslint-plugin-import@^2.14.0: tsconfig-paths "^3.9.0" eslint-plugin-jest@^23.8.2: - version "23.17.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.17.1.tgz#c0f39ba78e0f33b7ee1ce4ec92b773e39026ea3f" - integrity sha512-/o36fw67qNbJGWbSBIBMfseMsNP/d88WUHAGHCi1xFwsNB3XXZGdvxbOw49j3iQz6MCW/yw8OeOsuQhi6mM5ZA== + version "23.18.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.18.0.tgz#4813eacb181820ed13c5505f400956d176b25af8" + integrity 
sha512-wLPM/Rm1SGhxrFQ2TKM/BYsYPhn7ch6ZEK92S2o/vGkAAnDXM0I4nTIo745RIX+VlCRMFgBuJEax6XfTHMdeKg== dependencies: "@typescript-eslint/experimental-utils" "^2.5.0" @@ -3609,21 +3626,21 @@ eslint-plugin-prettier@^2.2.0: jest-docblock "^21.0.0" eslint-plugin-react@^7.11.1: - version "7.20.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.20.0.tgz#f98712f0a5e57dfd3e5542ef0604b8739cd47be3" - integrity sha512-rqe1abd0vxMjmbPngo4NaYxTcR3Y4Hrmc/jg4T+sYz63yqlmJRknpEQfmWY+eDWPuMmix6iUIK+mv0zExjeLgA== + version "7.20.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.20.3.tgz#0590525e7eb83890ce71f73c2cf836284ad8c2f1" + integrity sha512-txbo090buDeyV0ugF3YMWrzLIUqpYTsWSDZV9xLSmExE1P/Kmgg9++PD931r+KEWS66O1c9R4srLVVHmeHpoAg== dependencies: array-includes "^3.1.1" + array.prototype.flatmap "^1.2.3" doctrine "^2.1.0" has "^1.0.3" - jsx-ast-utils "^2.2.3" - object.entries "^1.1.1" + jsx-ast-utils "^2.4.1" + object.entries "^1.1.2" object.fromentries "^2.0.2" object.values "^1.1.1" prop-types "^15.7.2" - resolve "^1.15.1" + resolve "^1.17.0" string.prototype.matchall "^4.0.2" - xregexp "^4.3.0" eslint-rule-composer@^0.3.0: version "0.3.0" @@ -3792,9 +3809,9 @@ execa@^3.2.0: strip-final-newline "^2.0.0" execa@^4.0.1, execa@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/execa/-/execa-4.0.2.tgz#ad87fb7b2d9d564f70d2b62d511bee41d5cbb240" - integrity sha512-QI2zLa6CjGWdiQsmSkZoGtDx2N+cQIGb3yNolGTdjSQzydzLgYYf8LRuagp7S7fPimjcrzUDSUFd/MgzELMi4Q== + version "4.0.3" + resolved "https://registry.yarnpkg.com/execa/-/execa-4.0.3.tgz#0a34dabbad6d66100bd6f2c576c8669403f317f2" + integrity sha512-WFDXGHckXPWZX19t1kCsXzOpqX9LWYNqn4C+HqZlk/V0imTkzJZqf87ZBhvpHaftERYknpk0fjSylnXVlVgI0A== dependencies: cross-spawn "^7.0.0" get-stream "^5.0.0" @@ -4592,13 +4609,20 @@ husky@^4.2.5: slash "^3.0.0" which-pm-runs "^1.0.0" -iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@~0.4.13: +iconv-lite@0.4.24, iconv-lite@^0.4.24: 
version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" +iconv-lite@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.2.tgz#ce13d1875b0c3a674bd6a04b7f76b01b1b6ded01" + integrity sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + ieee754@^1.1.4: version "1.1.13" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" @@ -4711,20 +4735,20 @@ inquirer@^1.2.2: through "^2.3.6" inquirer@^7.0.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.2.0.tgz#63ce99d823090de7eb420e4bb05e6f3449aa389a" - integrity sha512-E0c4rPwr9ByePfNlTIB8z51kK1s2n6jrHuJeEHENl/sbq2G/S1auvibgEwNR4uSyiU+PiYHqSwsgGiXjG8p5ZQ== + version "7.3.2" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.2.tgz#25245d2e32dc9f33dbe26eeaada231daa66e9c7c" + integrity sha512-DF4osh1FM6l0RJc5YWYhSDB6TawiBRlbV9Cox8MWlidU218Tb7fm3lQTULyUJDfJ0tjbzl0W4q651mrCCEM55w== dependencies: ansi-escapes "^4.2.1" - chalk "^3.0.0" + chalk "^4.1.0" cli-cursor "^3.1.0" - cli-width "^2.0.0" + cli-width "^3.0.0" external-editor "^3.0.3" figures "^3.0.0" - lodash "^4.17.15" + lodash "^4.17.16" mute-stream "0.0.8" run-async "^2.4.0" - rxjs "^6.5.3" + rxjs "^6.6.0" string-width "^4.1.0" strip-ansi "^6.0.0" through "^2.3.6" @@ -5522,9 +5546,9 @@ jest@^25.5.0: jest-cli "^25.5.4" js-base64@^2.1.8: - version "2.6.2" - resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.2.tgz#cf9301bc5cc756892a9a6c8d7138322e5944fb0d" - integrity sha512-1hgLrLIrmCgZG+ID3VoLNLOSwjGnoZa8tyrUdEteMeIzsT6PH7PMLyUvbDwzNE56P3PNxyvuIOx4Uh2E5rzQIw== + version "2.6.3" + resolved 
"https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.3.tgz#7afdb9b57aa7717e15d370b66e8f36a9cb835dc3" + integrity sha512-fiUvdfCaAXoQTHdKMgTvg6IkecXDcVz6V5rlftUTclF9IKBjMizvSdQaCl/z/6TApDeby5NL+axYou3i0mu1Pg== "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" @@ -5635,7 +5659,7 @@ jsprim@^1.2.2: json-schema "0.2.3" verror "1.10.0" -jsx-ast-utils@^2.2.3, jsx-ast-utils@^2.4.1: +jsx-ast-utils@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-2.4.1.tgz#1114a4c1209481db06c690c2b4f488cc665f657e" integrity sha512-z1xSldJ6imESSzOjd3NNkieVJKRlKYSOtMG8SFyCj2FIrvSaSuli/WjpBkEzCBoR9bYYYFgqJw61Xhu7Lcgk+w== @@ -5750,9 +5774,9 @@ lint-staged@^10.2.0: stringify-object "^3.3.0" listr2@^2.1.0: - version "2.1.8" - resolved "https://registry.yarnpkg.com/listr2/-/listr2-2.1.8.tgz#8af7ebc70cdbe866ddbb6c80909142bd45758f1f" - integrity sha512-Op+hheiChfAphkJ5qUxZtHgyjlX9iNnAeFS/S134xw7mVSg0YVrQo1IY4/K+ElY6XgOPg2Ij4z07urUXR+YEew== + version "2.2.1" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-2.2.1.tgz#3a0abf78a7a9d9fb4121a541b524cb52e8dcbbba" + integrity sha512-WhuhT7xpVi2otpY/OzJJ8DQhf6da8MjGiEhMdA9oQquwtsSfzZt+YKlasUBer717Uocd0oPmbPeiTD7MvGzctw== dependencies: chalk "^4.0.0" cli-truncate "^2.1.0" @@ -5841,10 +5865,10 @@ lodash.sortby@^4.7.0: resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= -lodash@^4.0.0, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4, lodash@^4.3.0, lodash@~4.17.10: - version "4.17.15" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" - integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== +lodash@^4.0.0, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.16, lodash@^4.17.19, lodash@^4.17.4, 
lodash@^4.3.0, lodash@~4.17.10: + version "4.17.19" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" + integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== "lodash@npm:@elastic/lodash@3.10.1-kibana4": version "3.10.1-kibana4" @@ -6195,9 +6219,9 @@ natural-compare@^1.4.0: integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= neo-async@^2.5.0, neo-async@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" - integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== nice-try@^1.0.4: version "1.0.5" @@ -6286,9 +6310,9 @@ node-notifier@^6.0.0: which "^1.3.1" node-releases@^1.1.58: - version "1.1.58" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.58.tgz#8ee20eef30fa60e52755fcc0942def5a734fe935" - integrity sha512-NxBudgVKiRh/2aPWMgPR7bPTX0VPmGx5QBwCtdHitnqFE5/O8DeBXuIMH1nwNnw/aMo6AjOrpsHzfY3UbUJ7yg== + version "1.1.59" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.59.tgz#4d648330641cec704bff10f8e4fe28e453ab8e8e" + integrity sha512-H3JrdUczbdiwxN5FuJPyCHnGHIFqQ0wWxo+9j1kAXAzqNMAHlo+4I/sYYxpyK0irQ73HgdiyzD32oqQDcU2Osw== node-sass@^4.13.1: version "4.14.1" @@ -6314,9 +6338,9 @@ node-sass@^4.13.1: "true-case-path" "^1.0.2" node@^14.0.0: - version "14.4.0" - resolved "https://registry.yarnpkg.com/node/-/node-14.4.0.tgz#be38ed681ec8f3d72637311453eb409a485ce982" - integrity sha512-uJ9LXT9OjBEZaFtyVxSPxLfVCPZ9TPUtyqqxSyDazj2Vj40S9sL3b1hKnctktnwTG8IMqDsUnQ6HOplDS1RuMQ== + version "14.5.0" + resolved 
"https://registry.yarnpkg.com/node/-/node-14.5.0.tgz#9b41f8e6790225a6ffae0ff3e355cad55c745c65" + integrity sha512-WxjuzzP5rI3yM/TUdajF4FzKUorUYIOtcNlsMh9a/JX5YsCBS/I2PdEe03F80Nnyfd9qZTjKnTV9XuQuuOYc6Q== dependencies: node-bin-setup "^1.0.0" @@ -6441,7 +6465,7 @@ object.assign@^4.0.4, object.assign@^4.1.0: has-symbols "^1.0.0" object-keys "^1.0.11" -object.entries@^1.1.1: +object.entries@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.2.tgz#bc73f00acb6b6bb16c203434b10f9a7e797d3add" integrity sha512-BQdB9qKmb/HyNdMNWVr7O3+z5MUIx3aiegEIJqjMBbBf0YT9RRxTJSim4mzFqtyr7PDAHigq0N9dO0m0tRakQA== @@ -6890,11 +6914,6 @@ pretty-format@^25.2.1, pretty-format@^25.5.0: ansi-styles "^4.0.0" react-is "^16.12.0" -private@^0.1.8: - version "0.1.8" - resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" - integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== - process-nextick-args@^2.0.0, process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" @@ -7348,12 +7367,11 @@ regenerator-runtime@^0.13.4: integrity sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA== regenerator-transform@^0.14.2: - version "0.14.4" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.4.tgz#5266857896518d1616a78a0479337a30ea974cc7" - integrity sha512-EaJaKPBI9GvKpvUz2mz4fhx7WPgvwRLY9v3hlNHWmAuJHI13T4nwKnNvm5RWJzEdnI5g5UwtOww+S8IdoUC2bw== + version "0.14.5" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.5.tgz#c98da154683671c9c4dcb16ece736517e1b7feb4" + integrity sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw== dependencies: "@babel/runtime" "^7.8.4" - private 
"^0.1.8" regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" @@ -7535,7 +7553,7 @@ resolve@1.1.7: resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= -resolve@^1.10.0, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.15.1, resolve@^1.17.0, resolve@^1.3.2, resolve@^1.5.0, resolve@^1.7.1, resolve@^1.8.1: +resolve@^1.10.0, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.17.0, resolve@^1.3.2, resolve@^1.5.0, resolve@^1.7.1, resolve@^1.8.1: version "1.17.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== @@ -7624,10 +7642,10 @@ rx@^4.1.0: resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" integrity sha1-pfE/957zt0D+MKqAP7CfmIBdR4I= -rxjs@^6.5.3, rxjs@^6.5.5: - version "6.5.5" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.5.5.tgz#c5c884e3094c8cfee31bf27eb87e54ccfc87f9ec" - integrity sha512-WfQI+1gohdf0Dai/Bbmk5L5ItH5tYqm3ki2c5GdWhKjalzjg93N3avFjVStyZZz+A2Em+ZxKH5bNghw9UeylGQ== +rxjs@^6.5.5, rxjs@^6.6.0: + version "6.6.0" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.0.tgz#af2901eedf02e3a83ffa7f886240ff9018bbec84" + integrity sha512-3HMA8z/Oz61DUHe+SdOiQyzIf4tOx5oQHmMir7IZEu6TMqCLHT4LRcmNaUS0NwOz8VLvmmBduMsoaUvMaIiqzg== dependencies: tslib "^1.9.0" @@ -7648,7 +7666,7 @@ safe-regex@^1.1.0: dependencies: ret "~0.1.10" -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity 
sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== @@ -7904,7 +7922,7 @@ source-list-map@^2.0.0: resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== -source-map-resolve@^0.5.0, source-map-resolve@^0.5.2: +source-map-resolve@^0.5.0: version "0.5.3" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== @@ -7915,6 +7933,14 @@ source-map-resolve@^0.5.0, source-map-resolve@^0.5.2: source-map-url "^0.4.0" urix "^0.1.0" +source-map-resolve@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" + integrity sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.12: version "0.5.19" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" @@ -8244,9 +8270,9 @@ strip-indent@^3.0.0: min-indent "^1.0.0" strip-json-comments@^3.0.1: - version "3.1.0" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.0.tgz#7638d31422129ecf4457440009fba03f9f9ac180" - integrity sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w== + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity 
sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== supports-color@^2.0.0: version "2.0.0" @@ -9026,9 +9052,9 @@ whatwg-encoding@^1.0.1, whatwg-encoding@^1.0.5: iconv-lite "0.4.24" whatwg-fetch@>=0.10.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.0.0.tgz#fc804e458cc460009b1a2b966bc8817d2578aefb" - integrity sha512-9GSJUgz1D4MfyKU7KRqwOjXCXTqWdFNvEr7eUBYchQiVc744mqK/MzXPNR2WsPkmkOa4ywfg8C2n8h+13Bey1Q== + version "3.2.0" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.2.0.tgz#8e134f701f0a4ab5fda82626f113e2b647fd16dc" + integrity sha512-SdGPoQMMnzVYThUbSrEvqTlkvC1Ux27NehaJ/GUHBfNrh5Mjg+1/uRyFMwVnxO2MrikMWvWAqUGgQOfVU4hT7w== whatwg-mimetype@^2.2.0, whatwg-mimetype@^2.3.0: version "2.3.0" @@ -9128,9 +9154,9 @@ write@1.0.3: mkdirp "^0.5.1" ws@^7.0.0: - version "7.3.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.3.0.tgz#4b2f7f219b3d3737bc1a2fbf145d825b94d38ffd" - integrity sha512-iFtXzngZVXPGgpTlP1rBqsUK82p9tKqsWRPg5L56egiljujJT3vGAYnHANvFxBieXrTFavhzhxW52jnaWV+w2w== + version "7.3.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.3.1.tgz#d0547bf67f7ce4f12a72dfe31262c68d7dc551c8" + integrity sha512-D3RuNkynyHmEJIpD2qrgVkc9DQ23OrN/moAwZX4L8DfvszsJxpjQuUq3LMx6HoYji9fbIOBY18XWBsAux1ZZUA== xml-name-validator@^3.0.0: version "3.0.0" @@ -9142,13 +9168,6 @@ xmlchars@^2.1.1: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -xregexp@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/xregexp/-/xregexp-4.3.0.tgz#7e92e73d9174a99a59743f67a4ce879a04b5ae50" - integrity sha512-7jXDIFXh5yJ/orPn4SXjuVrWWoi4Cr8jfV1eHv9CixKSbU+jY4mxfrBwAuDvupPNKpMUY+FeIqsVw/JLT9+B8g== - dependencies: - "@babel/runtime-corejs3" "^7.8.3" - xtend@^4.0.0, xtend@~4.0.0, xtend@~4.0.1: version "4.0.2" resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" @@ -9174,7 +9193,7 @@ yaml@^1.7.2: resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e" integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg== -yargs-parser@18.x, yargs-parser@^18.1.1: +yargs-parser@18.x, yargs-parser@^18.1.2: version "18.1.3" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== @@ -9207,9 +9226,9 @@ yargs@^13.3.2: yargs-parser "^13.1.2" yargs@^15.3.1: - version "15.3.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b" - integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA== + version "15.4.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== dependencies: cliui "^6.0.0" decamelize "^1.2.0" @@ -9221,7 +9240,7 @@ yargs@^15.3.1: string-width "^4.2.0" which-module "^2.0.0" y18n "^4.0.0" - yargs-parser "^18.1.1" + yargs-parser "^18.1.2" yazl@^2.5.1: version "2.5.1" diff --git a/sql/build.gradle b/sql/build.gradle index 56b18c010e..0443ba57c5 100644 --- a/sql/build.gradle +++ b/sql/build.gradle @@ -25,7 +25,8 @@ dependencies { antlr "org.antlr:antlr4:4.7.1" compile "org.antlr:antlr4-runtime:4.7.1" - compile group: 'com.google.guava', name: 'guava', version:'23.0' + // https://github.com/google/guava/wiki/CVE-2018-10237 + implementation group: 'com.google.guava', name: 'guava', version: '29.0-jre' compile group: 'org.json', name: 'json', version:'20180813' compile group: 'org.springframework', name: 'spring-context', 
version: '5.2.5.RELEASE' compile group: 'org.springframework', name: 'spring-beans', version: '5.2.5.RELEASE' diff --git a/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 b/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 new file mode 100644 index 0000000000..f29895e522 --- /dev/null +++ b/sql/src/main/antlr/OpenDistroSQLIdentifierParser.g4 @@ -0,0 +1,45 @@ +/* +MySQL (Positive Technologies) grammar +The MIT License (MIT). +Copyright (c) 2015-2017, Ivan Kochurkin (kvanttt@gmail.com), Positive Technologies. +Copyright (c) 2017, Ivan Khudyashev (IHudyashov@ptsecurity.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +*/ + +parser grammar OpenDistroSQLIdentifierParser; + +options { tokenVocab=OpenDistroSQLLexer; } + + +// Identifiers + +tableName + : qualifiedName + ; + +qualifiedName + : ident (DOT ident)* + ; + +ident + : DOT? 
ID + | DOUBLE_QUOTE_ID + | BACKTICK_QUOTE_ID + ; diff --git a/sql/src/main/antlr/OpenDistroSQLLexer.g4 b/sql/src/main/antlr/OpenDistroSQLLexer.g4 index 61c13a3a30..66e3d1bade 100644 --- a/sql/src/main/antlr/OpenDistroSQLLexer.g4 +++ b/sql/src/main/antlr/OpenDistroSQLLexer.g4 @@ -139,11 +139,14 @@ ATAN: 'ATAN'; ATAN2: 'ATAN2'; CBRT: 'CBRT'; CEIL: 'CEIL'; +CEILING: 'CEILING'; CONCAT: 'CONCAT'; CONCAT_WS: 'CONCAT_WS'; +CONV: 'CONV'; COS: 'COS'; COSH: 'COSH'; COT: 'COT'; +CRC32: 'CRC32'; CURDATE: 'CURDATE'; DATE: 'DATE'; DATE_FORMAT: 'DATE_FORMAT'; @@ -186,7 +189,9 @@ SINH: 'SINH'; SQRT: 'SQRT'; SUBTRACT: 'SUBTRACT'; TAN: 'TAN'; +TIME: 'TIME'; TIMESTAMP: 'TIMESTAMP'; +TRUNCATE: 'TRUNCATE'; UPPER: 'UPPER'; D: 'D'; @@ -290,7 +295,7 @@ COLON_SYMB: ':'; // Literal Primitives START_NATIONAL_STRING_LITERAL: 'N' SQUOTA_STRING; -STRING_LITERAL: DQUOTA_STRING | SQUOTA_STRING | BQUOTA_STRING; +STRING_LITERAL: SQUOTA_STRING; DECIMAL_LITERAL: DEC_DIGIT+; HEXADECIMAL_LITERAL: 'X' '\'' (HEX_DIGIT HEX_DIGIT)+ '\'' | '0X' HEX_DIGIT+; @@ -304,33 +309,17 @@ BIT_STRING: BIT_STRING_L; -// Hack for dotID -// Prevent recognize string: .123somelatin AS ((.123), FLOAT_LITERAL), ((somelatin), ID) -// it must recoginze: .123somelatin AS ((.), DOT), (123somelatin, ID) - -DOT_ID: '.' ID_LITERAL; - - - // Identifiers ID: ID_LITERAL; -// DOUBLE_QUOTE_ID: '"' ~'"'+ '"'; -REVERSE_QUOTE_ID: '`' ~'`'+ '`'; -STRING_USER_NAME: ( - SQUOTA_STRING | DQUOTA_STRING - | BQUOTA_STRING | ID_LITERAL - ) '@' - ( - SQUOTA_STRING | DQUOTA_STRING - | BQUOTA_STRING | ID_LITERAL - ); +DOUBLE_QUOTE_ID: DQUOTA_STRING; +BACKTICK_QUOTE_ID: BQUOTA_STRING; // Fragments for Literal primitives fragment EXPONENT_NUM_PART: 'E' [-+]? DEC_DIGIT+; -fragment ID_LITERAL: [A-Z_$0-9@]*?[A-Z_$]+?[A-Z_$\-0-9]*; +fragment ID_LITERAL: [*A-Z]+?[*A-Z_\-0-9]*; fragment DQUOTA_STRING: '"' ( '\\'. | '""' | ~('"'| '\\') )* '"'; fragment SQUOTA_STRING: '\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\''; fragment BQUOTA_STRING: '`' ( '\\'. 
| '``' | ~('`'|'\\'))* '`'; diff --git a/sql/src/main/antlr/OpenDistroSQLParser.g4 b/sql/src/main/antlr/OpenDistroSQLParser.g4 index fb2d7eb83e..49422c061c 100644 --- a/sql/src/main/antlr/OpenDistroSQLParser.g4 +++ b/sql/src/main/antlr/OpenDistroSQLParser.g4 @@ -25,6 +25,8 @@ THE SOFTWARE. parser grammar OpenDistroSQLParser; +import OpenDistroSQLIdentifierParser; + options { tokenVocab=OpenDistroSQLLexer; } @@ -32,7 +34,7 @@ options { tokenVocab=OpenDistroSQLLexer; } // Root rule root - : sqlStatement? EOF + : sqlStatement? SEMI? EOF ; // Only SELECT @@ -57,17 +59,26 @@ selectStatement // Select Statement's Details querySpecification + : selectClause + fromClause? + ; + +selectClause : SELECT selectElements ; selectElements - : selectElement (COMMA selectElement)* + : (star=STAR | selectElement) (',' selectElement)* ; selectElement : expression #selectExpressionElement ; +fromClause + : FROM tableName + ; + // Literals @@ -76,6 +87,7 @@ constant | sign? decimalLiteral #signedDecimal | sign? 
realLiteral #signedReal | booleanLiteral #boolean + | datetimeLiteral #datetime // Doesn't support the following types for now //| nullLiteral #null //| BIT_STRING @@ -88,14 +100,7 @@ decimalLiteral ; stringLiteral - : ( - STRING_LITERAL - | START_NATIONAL_STRING_LITERAL - ) STRING_LITERAL+ - | ( - STRING_LITERAL - | START_NATIONAL_STRING_LITERAL - ) + : STRING_LITERAL ; booleanLiteral @@ -114,6 +119,25 @@ nullLiteral : NULL_LITERAL ; +// Date and Time Literal, follow ANSI 92 +datetimeLiteral + : dateLiteral + | timeLiteral + | timestampLiteral + ; + +dateLiteral + : DATE date=stringLiteral + ; + +timeLiteral + : TIME time=stringLiteral + ; + +timestampLiteral + : TIMESTAMP timestamp=stringLiteral + ; + // Expressions, predicates // Simplified approach for expression @@ -141,11 +165,26 @@ functionCall ; scalarFunctionName - : ABS + : mathematicalFunctionName + | dateTimeFunctionName + ; + +mathematicalFunctionName + : ABS | CEIL | CEILING | CONV | CRC32 | E | EXP | FLOOR | LN | LOG | LOG10 | LOG2 | MOD | PI | POW | POWER + | RAND | ROUND | SIGN | SQRT | TRUNCATE + | trigonometricFunctionName + ; + +trigonometricFunctionName + : ACOS | ASIN | ATAN | ATAN2 | COS | COT | DEGREES | RADIANS | SIN | TAN + ; + +dateTimeFunctionName + : DAYOFMONTH ; functionArgs - : functionArg (COMMA functionArg)* + : (functionArg (COMMA functionArg)*)? 
; functionArg diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java index 40f42293d2..0774bf769f 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilder.java @@ -16,12 +16,17 @@ package com.amazon.opendistroforelasticsearch.sql.sql.parser; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.FromClauseContext; +import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SelectClauseContext; import static com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.SimpleSelectContext; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Project; +import com.amazon.opendistroforelasticsearch.sql.ast.tree.Relation; import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Values; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.QuerySpecificationContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParserBaseVisitor; import com.google.common.collect.ImmutableList; import java.util.Collections; @@ -35,21 +40,47 @@ */ public class AstBuilder extends OpenDistroSQLParserBaseVisitor { + private static final Project SELECT_ALL = null; + private final AstExpressionBuilder expressionBuilder = new AstExpressionBuilder(); @Override public UnresolvedPlan visitSimpleSelect(SimpleSelectContext ctx) { - List selectElements = ctx.querySpecification().selectElements().children; - Project project = new 
Project(selectElements.stream() - .map(this::visitAstExpression) - .filter(Objects::nonNull) - .collect(Collectors.toList())); - - // Attach an Values operator with only a empty row inside so that - // Project operator can have a chance to evaluate its expression - // though the evaluation doesn't have any dependency on what's in Values. - Values emptyValue = new Values(ImmutableList.of(Collections.emptyList())); - return project.attach(emptyValue); + QuerySpecificationContext query = ctx.querySpecification(); + UnresolvedPlan project = visit(query.selectClause()); + + if (query.fromClause() == null) { + if (project == SELECT_ALL) { + throw new SyntaxCheckException("No FROM clause found for select all"); + } + + // Attach an Values operator with only a empty row inside so that + // Project operator can have a chance to evaluate its expression + // though the evaluation doesn't have any dependency on what's in Values. + Values emptyValue = new Values(ImmutableList.of(Collections.emptyList())); + return project.attach(emptyValue); + } + + UnresolvedPlan relation = visit(query.fromClause()); + return (project == SELECT_ALL) ? relation : project.attach(relation); + } + + @Override + public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) { + if (ctx.selectElements().star != null) { //TODO: project operator should be required? 
+ return SELECT_ALL; + } + + List selectElements = ctx.selectElements().children; + return new Project(selectElements.stream() + .map(this::visitAstExpression) + .filter(Objects::nonNull) + .collect(Collectors.toList())); + } + + @Override + public UnresolvedPlan visitFromClause(FromClauseContext ctx) { + return new Relation(visitAstExpression(ctx.tableName().qualifiedName())); } @Override diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java index 057d2dbf2f..b3fab91c47 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilder.java @@ -26,17 +26,43 @@ import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Function; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.QualifiedName; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.IdentContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.NestedExpressionAtomContext; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.QualifiedNameContext; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser.TableNameContext; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParserBaseVisitor; import java.util.Arrays; import java.util.stream.Collectors; +import org.antlr.v4.runtime.tree.RuleNode; /** * Expression builder to parse text to expression in AST. 
*/ public class AstExpressionBuilder extends OpenDistroSQLParserBaseVisitor { + @Override + public UnresolvedExpression visitTableName(TableNameContext ctx) { + return new QualifiedName(visitQualifiedNameText(ctx)); + } + + @Override + public UnresolvedExpression visitIdent(IdentContext ctx) { + return new QualifiedName(visitQualifiedNameText(ctx)); + } + + @Override + public UnresolvedExpression visitQualifiedName(QualifiedNameContext ctx) { + return new QualifiedName( + ctx.ident() + .stream() + .map(this::visitQualifiedNameText) + .collect(Collectors.toList()) + ); + } + @Override public UnresolvedExpression visitMathExpressionAtom(MathExpressionAtomContext ctx) { return new Function( @@ -82,4 +108,24 @@ public UnresolvedExpression visitBoolean(BooleanContext ctx) { return AstDSL.booleanLiteral(Boolean.valueOf(ctx.getText())); } + @Override + public UnresolvedExpression visitDateLiteral(OpenDistroSQLParser.DateLiteralContext ctx) { + return AstDSL.dateLiteral(unquoteIdentifier(ctx.date.getText())); + } + + @Override + public UnresolvedExpression visitTimeLiteral(OpenDistroSQLParser.TimeLiteralContext ctx) { + return AstDSL.timeLiteral(unquoteIdentifier(ctx.time.getText())); + } + + @Override + public UnresolvedExpression visitTimestampLiteral( + OpenDistroSQLParser.TimestampLiteralContext ctx) { + return AstDSL.timestampLiteral(unquoteIdentifier(ctx.timestamp.getText())); + } + + private String visitQualifiedNameText(RuleNode node) { + return unquoteIdentifier(node.getText()); + } + } diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java index 7bd8d93b2b..94d668c4ba 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/antlr/SQLSyntaxParserTest.java @@ -20,22 +20,61 @@ import static 
org.junit.jupiter.api.Assertions.assertThrows; import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; -import org.antlr.v4.runtime.tree.ParseTree; import org.junit.jupiter.api.Test; class SQLSyntaxParserTest { private final SQLSyntaxParser parser = new SQLSyntaxParser(); + @Test + public void canParseQueryEndWithSemiColon() { + assertNotNull(parser.parse("SELECT 123;")); + } + @Test public void canParseSelectLiterals() { - ParseTree parseTree = parser.parse("SELECT 123, 'hello'"); - assertNotNull(parseTree); + assertNotNull(parser.parse("SELECT 123, 'hello'")); + } + + @Test + public void canParseIndexNameWithDate() { + assertNotNull(parser.parse("SELECT * FROM logs_2020_01")); + assertNotNull(parser.parse("SELECT * FROM logs-2020-01")); + } + + @Test + public void canParseHiddenIndexName() { + assertNotNull(parser.parse("SELECT * FROM .kibana")); + } + + @Test + public void canNotParseIndexNameWithSpecialChar() { + assertThrows(SyntaxCheckException.class, + () -> parser.parse("SELECT * FROM hello+world")); + } + + @Test + public void canParseIndexNameWithSpecialCharQuoted() { + assertNotNull(parser.parse("SELECT * FROM `hello+world`")); + assertNotNull(parser.parse("SELECT * FROM \"hello$world\"")); + } + + @Test + public void canNotParseIndexNameStartingWithNumber() { + assertThrows(SyntaxCheckException.class, + () -> parser.parse("SELECT * FROM 123test")); + } + + @Test + public void canNotParseIndexNameSingleQuoted() { + assertThrows(SyntaxCheckException.class, + () -> parser.parse("SELECT * FROM 'test'")); } @Test public void canNotParseInvalidSelect() { - assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM test")); + assertThrows(SyntaxCheckException.class, + () -> parser.parse("SELECT * FROM test WHERE age = 10")); } } \ No newline at end of file diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java 
b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java index a5546db97f..5f047b22b5 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstBuilderTest.java @@ -20,12 +20,15 @@ import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.doubleLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.intLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.project; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.relation; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.stringLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.values; import static java.util.Collections.emptyList; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.sql.antlr.SQLSyntaxParser; import org.antlr.v4.runtime.tree.ParseTree; import org.junit.jupiter.api.Test; @@ -43,7 +46,7 @@ class AstBuilderTest { private final AstBuilder astBuilder = new AstBuilder(); @Test - public void buildASTForSelectLiterals() { + public void canBuildSelectLiterals() { assertEquals( project( values(emptyList()), @@ -56,6 +59,24 @@ public void buildASTForSelectLiterals() { ); } + @Test + public void canBuildSelectAllFromIndex() { + assertEquals( + relation("test"), + buildAST("SELECT * FROM test") + ); + + assertThrows(SyntaxCheckException.class, () -> buildAST("SELECT *")); + } + + @Test + public void buildSelectFieldsFromIndex() { // TODO: change to select fields later + assertEquals( + project(relation("test"), intLiteral(1)), + buildAST("SELECT 
1 FROM test") + ); + } + private UnresolvedPlan buildAST(String query) { ParseTree parseTree = parser.parse(query); return parseTree.accept(astBuilder); diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java index cd304b5d38..5dece1f09f 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstExpressionBuilderTest.java @@ -17,10 +17,13 @@ package com.amazon.opendistroforelasticsearch.sql.sql.parser; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.booleanLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.dateLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.doubleLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.function; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.intLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.stringLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.timeLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.timestampLiteral; import static org.junit.jupiter.api.Assertions.assertEquals; import com.amazon.opendistroforelasticsearch.sql.ast.Node; @@ -67,6 +70,30 @@ public void canBuildBooleanLiteral() { ); } + @Test + public void canBuildDateLiteral() { + assertEquals( + dateLiteral("2020-07-07"), + buildExprAst("DATE '2020-07-07'") + ); + } + + @Test + public void canBuildTimeLiteral() { + assertEquals( + timeLiteral("11:30:45"), + buildExprAst("TIME '11:30:45'") + ); + } + + @Test + public void canBuildTimestampLiteral() { + assertEquals( + timestampLiteral("2020-07-07 11:30:45"), + buildExprAst("TIMESTAMP '2020-07-07 11:30:45'") 
+ ); + } + @Test public void canBuildArithmeticExpression() { assertEquals( @@ -107,6 +134,14 @@ public void canBuildNestedFunctionCall() { ); } + @Test + public void canBuildDateAndTimeFunctionCall() { + assertEquals( + function("dayofmonth", dateLiteral("2020-07-07")), + buildExprAst("dayofmonth(DATE '2020-07-07')") + ); + } + private Node buildExprAst(String expr) { OpenDistroSQLLexer lexer = new OpenDistroSQLLexer(new CaseInsensitiveCharStream(expr)); OpenDistroSQLParser parser = new OpenDistroSQLParser(new CommonTokenStream(lexer)); diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java new file mode 100644 index 0000000000..e8c1506e7d --- /dev/null +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstQualifiedNameBuilderTest.java @@ -0,0 +1,95 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.sql.parser; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.CaseInsensitiveCharStream; +import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxAnalysisErrorListener; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLLexer; +import com.amazon.opendistroforelasticsearch.sql.sql.antlr.parser.OpenDistroSQLParser; +import java.util.function.Function; +import lombok.RequiredArgsConstructor; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.tree.RuleNode; +import org.junit.jupiter.api.Test; + +public class AstQualifiedNameBuilderTest { + + @Test + public void canBuildRegularIdentifierForSQLStandard() { + buildFromIdentifier("test").expectQualifiedName("test"); + buildFromIdentifier("test123").expectQualifiedName("test123"); + buildFromIdentifier("test_123").expectQualifiedName("test_123"); + } + + @Test + public void canBuildRegularIdentifierForElasticsearch() { + buildFromTableName(".kibana").expectQualifiedName(".kibana"); + //buildFromIdentifier("@timestamp").expectQualifiedName("@timestamp");//TODO: field name + buildFromIdentifier("logs-2020-01").expectQualifiedName("logs-2020-01"); + buildFromIdentifier("*logs*").expectQualifiedName("*logs*"); + } + + @Test + public void canBuildDelimitedIdentifier() { + buildFromIdentifier("\"hello$world\"").expectQualifiedName("hello$world"); + buildFromIdentifier("`logs.2020.01`").expectQualifiedName("logs.2020.01"); + } + + @Test + public void canBuildQualifiedIdentifier() { + buildFromQualifiers("account.location.city").expectQualifiedName("account", "location", "city"); + } + + private AstExpressionBuilderAssertion buildFromIdentifier(String expr) { + return new 
AstExpressionBuilderAssertion(OpenDistroSQLParser::ident, expr); + } + + private AstExpressionBuilderAssertion buildFromQualifiers(String expr) { + return new AstExpressionBuilderAssertion(OpenDistroSQLParser::qualifiedName, expr); + } + + private AstExpressionBuilderAssertion buildFromTableName(String expr) { + return new AstExpressionBuilderAssertion(OpenDistroSQLParser::tableName, expr); + } + + @RequiredArgsConstructor + private static class AstExpressionBuilderAssertion { + private final AstExpressionBuilder astExprBuilder = new AstExpressionBuilder(); + private final Function build; + private final String actual; + + public void expectQualifiedName(String... expected) { + assertEquals(AstDSL.qualifiedName(expected), buildExpression(actual)); + } + + private UnresolvedExpression buildExpression(String expr) { + return build.apply(createParser(expr)).accept(astExprBuilder); + } + + private OpenDistroSQLParser createParser(String expr) { + OpenDistroSQLLexer lexer = new OpenDistroSQLLexer(new CaseInsensitiveCharStream(expr)); + OpenDistroSQLParser parser = new OpenDistroSQLParser(new CommonTokenStream(lexer)); + parser.addErrorListener(new SyntaxAnalysisErrorListener()); + return parser; + } + } + +}