diff --git a/DEVELOPER_GUIDE.rst b/DEVELOPER_GUIDE.rst
index 5b1b4bd633..5b27288204 100644
--- a/DEVELOPER_GUIDE.rst
+++ b/DEVELOPER_GUIDE.rst
@@ -185,14 +185,31 @@ Note that other related project code has already merged into this single reposit
 
 Code Convention
 ---------------
-We’re integrated Checkstyle plugin into Gradle build: https://github.com/opensearch-project/sql/blob/main/config/checkstyle/google_checks.xml. So any violation will fail the build. You need to identify the offending code from Gradle error message and fix them and rerun the Gradle build. Here are the highlight of some Checkstyle rules:
+Java files in the OpenSearch SQL codebase are formatted using the `Spotless Gradle `_ plugin, which is configured in the project ``build.gradle``.
 
-* 2 spaces indentation.
-* No line starts with tab character in source file.
-* Line width <= 100 characters.
-* Wildcard imports: You can enforce single import by configuring your IDE. Instructions for Intellij IDEA: https://www.jetbrains.com/help/idea/creating-and-optimizing-imports.html#disable-wildcard-imports.
-* Operator needs to wrap at next line.
+The formatting check can be run explicitly with::
+./gradlew spotlessJavaCheck
+
+The code can be formatted with::
+
+./gradlew spotlessApply
+
+These tasks can also be run for specific modules, e.g.::
+
+./gradlew core:spotlessJavaCheck
+
+For more information on Spotless for the OpenSearch project, please see https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#java-language-formatting-guidelines.
+
+Java files are formatted using `Spotless `_ conforming to `Google Java Format `_.
+  * - New line at end of file
+  * - No unused import statements
+  * - Fix import order to be alphabetical with static imports first (one block for static and one for non-static imports)
+  * - Max line length is 100 characters (does not apply to import statements)
+  * - Indentation is 2 spaces
+  * - Javadocs should be formatted in accordance with the `Javadoc guidelines `_
+  * - Javadoc format can be maintained by wrapping javadoc with `<pre>` HTML tags
+  * - Strings can be formatted on multiple lines with a `+` with the correct indentation for the string.
 
 
 Building and Running Tests
 ==========================
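
For reference, the conventions listed above (2-space indentation, 100-character lines, single-line Javadoc, ordered imports, ``+``-continued strings) produce code in the shape shown by the Java hunks later in this patch. A minimal sketch of a Spotless-clean file, using a hypothetical class purely for illustration (not part of this patch), looks like::

    package org.opensearch.sql.example;

    // Static imports come first, then regular imports; each block is alphabetical, no wildcards.
    import static java.util.Collections.emptyList;

    import java.util.List;

    /** A short Javadoc sentence is collapsed onto a single line. */
    public class FormattingExample {

      // Two-space indentation throughout; lines wrap at 100 characters.
      private final List<String> parts;

      public FormattingExample(List<String> parts) {
        this.parts = (parts == null) ? emptyList() : parts;
      }

      /** Long string literals are split with a leading {@code +} on the continuation line. */
      public String describe() {
        return "Lines are wrapped at 100 characters, and concatenated strings carry the plus sign"
            + " at the start of the continuation line.";
      }
    }

Running ``./gradlew spotlessApply`` should leave a file in this shape untouched.
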
diff --git a/build.gradle b/build.gradle
index 71f94636b5..4dac89e956 100644
--- a/build.gradle
+++ b/build.gradle
@@ -85,7 +85,9 @@ spotless {
     java {
         target fileTree('.') {
             include 'core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java',
-                    'core/src/test/java/org/opensearch/sql/data/**/*.java',
+                    'core/src/main/java/org/opensearch/sql/monitor/**/*.java',
+                    'core/src/main/java/org/opensearch/sql/exception/**/*.java',
+                    'core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java',
                     'core/src/test/java/org/opensearch/sql/config/**/*.java',
                     'core/src/test/java/org/opensearch/sql/analysis/**/*.java',
                     'core/src/main/java/org/opensearch/sql/planner/**/*.java',
@@ -94,11 +96,11 @@ spotless {
                     'core/src/main/java/org/opensearch/sql/monitor/**/*.java',
                     'core/src/main/java/org/opensearch/sql/expression/**/*.java',
                     'core/src/main/java/org/opensearch/sql/executor/**/*.java',
-                    'core/src/main/java/org/opensearch/sql/exception/**/*.java',
                     'core/src/main/java/org/opensearch/sql/analysis/**/*.java',
                     'core/src/test/java/org/opensearch/sql/data/**/*.java',
                     'core/src/test/java/org/opensearch/sql/datasource/**/*.java',
+                    'core/src/main/java/org/opensearch/sql/utils/**/*.java',
                     'core/src/test/java/org/opensearch/sql/ast/**/*.java'
             exclude '**/build/**', '**/build-*/**'
         }
         importOrder()
diff --git a/core/build.gradle b/core/build.gradle
index 0e563b274e..113038c7d5 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -26,7 +26,6 @@ plugins {
     id 'java-library'
     id "io.freefair.lombok"
     id 'jacoco'
-    id 'info.solidsoft.pitest' version '1.9.0'
     id 'java-test-fixtures'
 }
 
@@ -34,20 +33,10 @@ repositories {
     mavenCentral()
 }
 
-// Being ignored as a temporary measure before being removed in favour of
-// spotless https://github.com/opensearch-project/sql/issues/1101
 checkstyleTest.ignoreFailures = true
 checkstyleMain.ignoreFailures = true
 checkstyleTestFixtures.ignoreFailures = true
 
-pitest {
-    targetClasses = ['org.opensearch.sql.*']
-    pitestVersion = '1.9.0'
-    threads = 4
-    outputFormats = ['HTML', 'XML']
-    timestampedReports = false
-    junit5PluginVersion = '1.0.0'
-}
 
 dependencies {
     api group: 'com.google.guava', name: 'guava', version: '32.0.1-jre'
diff --git a/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java b/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java
index 47988097c3..9c9dfa0772 100644
--- a/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java
+++ b/core/src/main/java/org/opensearch/sql/DataSourceSchemaName.java
@@ -17,5 +17,4 @@ public class DataSourceSchemaName {
   private final String dataSourceName;
 
   private final String schemaName;
-
 }
diff --git a/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java b/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java
index 4704d0566b..f1f29e9b38 100644
--- a/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java
+++ b/core/src/main/java/org/opensearch/sql/analysis/AnalysisContext.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.analysis;
 
 import java.util.ArrayList;
@@ -13,19 +12,14 @@
 import org.opensearch.sql.expression.NamedExpression;
 import org.opensearch.sql.expression.function.FunctionProperties;
 
-/**
- * The context used for Analyzer.
- */
+/** The context used for Analyzer. */
 public class AnalysisContext {
-  /**
-   * Environment stack for symbol scope management.
-   */
+  /** Environment stack for symbol scope management. */
   private TypeEnvironment environment;
-  @Getter
-  private final List namedParseExpressions;
 
-  @Getter
-  private final FunctionProperties functionProperties;
+  @Getter private final List namedParseExpressions;
+
+  @Getter private final FunctionProperties functionProperties;
 
   public AnalysisContext() {
     this(new TypeEnvironment(null));
@@ -33,6 +27,7 @@ public AnalysisContext() {
 
   /**
    * Class CTOR.
+   *
    * @param environment Env to set to a new instance.
    */
   public AnalysisContext(TypeEnvironment environment) {
@@ -41,9 +36,7 @@ public AnalysisContext(TypeEnvironment environment) {
     this.functionProperties = new FunctionProperties();
   }
 
-  /**
-   * Push a new environment.
-   */
+  /** Push a new environment. */
   public void push() {
     environment = new TypeEnvironment(environment);
   }
diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java
index 2c4647004c..d5e8b93b13 100644
--- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java
+++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.analysis;
 
 import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME;
@@ -29,7 +28,6 @@
 import java.util.stream.Collectors;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.commons.math3.analysis.function.Exp;
 import org.opensearch.sql.DataSourceSchemaName;
 import org.opensearch.sql.analysis.symbol.Namespace;
 import org.opensearch.sql.analysis.symbol.Symbol;
@@ -122,9 +120,7 @@ public class Analyzer extends AbstractNodeVisitor
 
   private final BuiltinFunctionRepository repository;
 
-  /**
-   * Constructor.
-   */
+  /** Constructor. */
   public Analyzer(
       ExpressionAnalyzer expressionAnalyzer,
       DataSourceService dataSourceService,
@@ -143,8 +139,8 @@ public LogicalPlan analyze(UnresolvedPlan unresolved, AnalysisContext context) {
   @Override
   public LogicalPlan visitRelation(Relation node, AnalysisContext context) {
     QualifiedName qualifiedName = node.getTableQualifiedName();
-    DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver
-        = new DataSourceSchemaIdentifierNameResolver(dataSourceService, qualifiedName.getParts());
+    DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver =
+        new DataSourceSchemaIdentifierNameResolver(dataSourceService, qualifiedName.getParts());
     String tableName = dataSourceSchemaIdentifierNameResolver.getIdentifierName();
     context.push();
     TypeEnvironment curEnv = context.peek();
@@ -152,28 +148,30 @@ public LogicalPlan visitRelation(Relation node, AnalysisContext context) {
     if (DATASOURCES_TABLE_NAME.equals(tableName)) {
       table = new DataSourceTable(dataSourceService);
     } else {
-      table = dataSourceService
-          .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName())
-          .getStorageEngine()
-          .getTable(new DataSourceSchemaName(
-              dataSourceSchemaIdentifierNameResolver.getDataSourceName(),
-              dataSourceSchemaIdentifierNameResolver.getSchemaName()),
-              dataSourceSchemaIdentifierNameResolver.getIdentifierName());
+      table =
+          dataSourceService
+              .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName())
+              .getStorageEngine()
+              .getTable(
+                  new DataSourceSchemaName(
+                      dataSourceSchemaIdentifierNameResolver.getDataSourceName(),
+                      dataSourceSchemaIdentifierNameResolver.getSchemaName()),
+                  dataSourceSchemaIdentifierNameResolver.getIdentifierName());
     }
     table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v));
-    table.getReservedFieldTypes().forEach(
-        (k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)
-    );
+    table
+        .getReservedFieldTypes()
+        .forEach((k, v) -> curEnv.define(new Symbol(Namespace.HIDDEN_FIELD_NAME, k), v));
 
     // Put index name or its alias in index namespace on type environment so qualifier
     // can be removed when analyzing qualified name. The value (expr type) here doesn't matter.
-    curEnv.define(new Symbol(Namespace.INDEX_NAME,
-        (node.getAlias() == null) ? tableName : node.getAlias()), STRUCT);
+    curEnv.define(
+        new Symbol(Namespace.INDEX_NAME, (node.getAlias() == null) ? tableName : node.getAlias()),
+        STRUCT);
 
     return new LogicalRelation(tableName, table);
   }
 
-
   @Override
   public LogicalPlan visitRelationSubquery(RelationSubquery node, AnalysisContext context) {
     LogicalPlan subquery = analyze(node.getChild().get(0), context);
@@ -189,30 +187,41 @@ public LogicalPlan visitRelationSubquery(RelationSubquery node, AnalysisContext
   @Override
   public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext context) {
     QualifiedName qualifiedName = node.getFunctionName();
-    DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver
-        = new DataSourceSchemaIdentifierNameResolver(this.dataSourceService,
-        qualifiedName.getParts());
-
-    FunctionName functionName
-        = FunctionName.of(dataSourceSchemaIdentifierNameResolver.getIdentifierName());
-    List arguments = node.getArguments().stream()
-        .map(unresolvedExpression -> this.expressionAnalyzer.analyze(unresolvedExpression, context))
-        .collect(Collectors.toList());
-    TableFunctionImplementation tableFunctionImplementation
-        = (TableFunctionImplementation) repository.compile(context.getFunctionProperties(),
-        dataSourceService
-            .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName())
-            .getStorageEngine().getFunctions(), functionName, arguments);
+    DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver =
+        new DataSourceSchemaIdentifierNameResolver(
+            this.dataSourceService, qualifiedName.getParts());
+
+    FunctionName functionName =
+        FunctionName.of(dataSourceSchemaIdentifierNameResolver.getIdentifierName());
+    List arguments =
+        node.getArguments().stream()
+            .map(
+                unresolvedExpression ->
+                    this.expressionAnalyzer.analyze(unresolvedExpression, context))
+            .collect(Collectors.toList());
+    TableFunctionImplementation tableFunctionImplementation =
+        (TableFunctionImplementation)
+            repository.compile(
+                context.getFunctionProperties(),
+                dataSourceService
+                    .getDataSource(dataSourceSchemaIdentifierNameResolver.getDataSourceName())
+                    .getStorageEngine()
+                    .getFunctions(),
+                functionName,
+                arguments);
     context.push();
     TypeEnvironment curEnv = context.peek();
     Table table = tableFunctionImplementation.applyArguments();
     table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v));
-    table.getReservedFieldTypes().forEach(
-        (k, v) -> curEnv.addReservedWord(new Symbol(Namespace.FIELD_NAME, k), v)
-    );
-    curEnv.define(new Symbol(Namespace.INDEX_NAME,
-            dataSourceSchemaIdentifierNameResolver.getIdentifierName()), STRUCT);
-    return new LogicalRelation(dataSourceSchemaIdentifierNameResolver.getIdentifierName(),
+    table
+        .getReservedFieldTypes()
+        .forEach((k, v) -> curEnv.define(new Symbol(Namespace.HIDDEN_FIELD_NAME, k), v));
+    curEnv.define(
+        new Symbol(
+            Namespace.INDEX_NAME, dataSourceSchemaIdentifierNameResolver.getIdentifierName()),
+        STRUCT);
+    return new LogicalRelation(
+        dataSourceSchemaIdentifierNameResolver.getIdentifierName(),
         tableFunctionImplementation.applyArguments());
   }
 
@@ -234,30 +243,28 @@ public LogicalPlan visitFilter(Filter node, AnalysisContext context) {
   }
 
   /**
-   * Ensure NESTED function is not used in GROUP BY, and HAVING clauses.
-   * Fallback to legacy engine. Can remove when support is added for NESTED function in WHERE,
-   * GROUP BY, ORDER BY, and HAVING clauses.
+   * Ensure NESTED function is not used in GROUP BY, and HAVING clauses. Fallback to legacy engine.
+   * Can remove when support is added for NESTED function in WHERE, GROUP BY, ORDER BY, and HAVING
+   * clauses.
+   *
    * @param condition : Filter condition
    */
   private void verifySupportsCondition(Expression condition) {
     if (condition instanceof FunctionExpression) {
-      if (((FunctionExpression) condition).getFunctionName().getFunctionName().equalsIgnoreCase(
-          BuiltinFunctionName.NESTED.name()
-      )) {
+      if (((FunctionExpression) condition)
+          .getFunctionName()
+          .getFunctionName()
+          .equalsIgnoreCase(BuiltinFunctionName.NESTED.name())) {
         throw new SyntaxCheckException(
             "Falling back to legacy engine. Nested function is not supported in WHERE,"
-                + " GROUP BY, and HAVING clauses."
-        );
+                + " GROUP BY, and HAVING clauses.");
       }
-      ((FunctionExpression)condition).getArguments().stream()
-          .forEach(e -> verifySupportsCondition(e)
-      );
+      ((FunctionExpression) condition)
+          .getArguments().stream().forEach(e -> verifySupportsCondition(e));
     }
   }
 
-  /**
-   * Build {@link LogicalRename}.
-   */
+  /** Build {@link LogicalRename}. */
   @Override
   public LogicalPlan visitRename(Rename node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -268,8 +275,8 @@ public LogicalPlan visitRename(Rename node, AnalysisContext context) {
       // We should define the new target field in the context instead of analyze it.
       if (renameMap.getTarget() instanceof Field) {
         ReferenceExpression target =
-            new ReferenceExpression(((Field) renameMap.getTarget()).getField().toString(),
-                origin.type());
+            new ReferenceExpression(
+                ((Field) renameMap.getTarget()).getField().toString(), origin.type());
         ReferenceExpression originExpr = DSL.ref(origin.toString(), origin.type());
         TypeEnvironment curEnv = context.peek();
         curEnv.remove(originExpr);
@@ -284,17 +291,15 @@ public LogicalPlan visitRename(Rename node, AnalysisContext context) {
     return new LogicalRename(child, renameMapBuilder.build());
   }
 
-  /**
-   * Build {@link LogicalAggregation}.
-   */
+  /** Build {@link LogicalAggregation}. */
   @Override
   public LogicalPlan visitAggregation(Aggregation node, AnalysisContext context) {
     final LogicalPlan child = node.getChild().get(0).accept(this, context);
     ImmutableList.Builder aggregatorBuilder = new ImmutableList.Builder<>();
     for (UnresolvedExpression expr : node.getAggExprList()) {
       NamedExpression aggExpr = namedExpressionAnalyzer.analyze(expr, context);
-      aggregatorBuilder
-          .add(new NamedAggregator(aggExpr.getNameOrAlias(), (Aggregator) aggExpr.getDelegated()));
+      aggregatorBuilder.add(
+          new NamedAggregator(aggExpr.getNameOrAlias(), (Aggregator) aggExpr.getDelegated()));
     }
 
     ImmutableList.Builder groupbyBuilder = new ImmutableList.Builder<>();
@@ -314,16 +319,17 @@ public LogicalPlan visitAggregation(Aggregation node, AnalysisContext context) {
     // new context
     context.push();
     TypeEnvironment newEnv = context.peek();
-    aggregators.forEach(aggregator -> newEnv.define(new Symbol(Namespace.FIELD_NAME,
-        aggregator.getName()), aggregator.type()));
-    groupBys.forEach(group -> newEnv.define(new Symbol(Namespace.FIELD_NAME,
-        group.getNameOrAlias()), group.type()));
+    aggregators.forEach(
+        aggregator ->
+            newEnv.define(
+                new Symbol(Namespace.FIELD_NAME, aggregator.getName()), aggregator.type()));
+    groupBys.forEach(
+        group ->
+            newEnv.define(new Symbol(Namespace.FIELD_NAME, group.getNameOrAlias()), group.type()));
     return new LogicalAggregation(child, aggregators, groupBys);
   }
 
-  /**
-   * Build {@link LogicalRareTopN}.
-   */
+  /** Build {@link LogicalRareTopN}. */
   @Override
   public LogicalPlan visitRareTopN(RareTopN node, AnalysisContext context) {
     final LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -343,10 +349,10 @@ public LogicalPlan visitRareTopN(RareTopN node, AnalysisContext context) {
     // new context
     context.push();
     TypeEnvironment newEnv = context.peek();
-    groupBys.forEach(group -> newEnv.define(new Symbol(Namespace.FIELD_NAME,
-        group.toString()), group.type()));
-    fields.forEach(field -> newEnv.define(new Symbol(Namespace.FIELD_NAME,
-        field.toString()), field.type()));
+    groupBys.forEach(
+        group -> newEnv.define(new Symbol(Namespace.FIELD_NAME, group.toString()), group.type()));
+    fields.forEach(
+        field -> newEnv.define(new Symbol(Namespace.FIELD_NAME, field.toString()), field.type()));
 
     List options = node.getNoOfResults();
     Integer noOfResults = (Integer) options.get(0).getValue().getValue();
@@ -397,28 +403,28 @@ public LogicalPlan visitProject(Project node, AnalysisContext context) {
     }
 
     List namedExpressions =
-        selectExpressionAnalyzer.analyze(node.getProjectList(), context,
+        selectExpressionAnalyzer.analyze(
+            node.getProjectList(),
+            context,
             new ExpressionReferenceOptimizer(expressionAnalyzer.getRepository(), child));
 
     for (UnresolvedExpression expr : node.getProjectList()) {
-      NestedAnalyzer nestedAnalyzer = new NestedAnalyzer(
-          namedExpressions, expressionAnalyzer, child
-      );
+      NestedAnalyzer nestedAnalyzer =
+          new NestedAnalyzer(namedExpressions, expressionAnalyzer, child);
       child = nestedAnalyzer.analyze(expr, context);
     }
 
     // new context
     context.push();
     TypeEnvironment newEnv = context.peek();
-    namedExpressions.forEach(expr -> newEnv.define(new Symbol(Namespace.FIELD_NAME,
-        expr.getNameOrAlias()), expr.type()));
+    namedExpressions.forEach(
+        expr ->
+            newEnv.define(new Symbol(Namespace.FIELD_NAME, expr.getNameOrAlias()), expr.type()));
     List namedParseExpressions = context.getNamedParseExpressions();
     return new LogicalProject(child, namedExpressions, namedParseExpressions);
   }
 
-  /**
-   * Build {@link LogicalEval}.
-   */
+  /** Build {@link LogicalEval}. */
   @Override
   public LogicalPlan visitEval(Eval node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -435,9 +441,7 @@ public LogicalPlan visitEval(Eval node, AnalysisContext context) {
     return new LogicalEval(child, expressionsBuilder.build());
   }
 
-  /**
-   * Build {@link ParseExpression} to context and skip to child nodes.
-   */
+  /** Build {@link ParseExpression} to context and skip to child nodes. */
   @Override
   public LogicalPlan visitParse(Parse node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -448,18 +452,19 @@ public LogicalPlan visitParse(Parse node, AnalysisContext context) {
     Expression patternExpression = DSL.literal(pattern);
 
     TypeEnvironment curEnv = context.peek();
-    ParseUtils.getNamedGroupCandidates(parseMethod, pattern, arguments).forEach(group -> {
-      ParseExpression expr = ParseUtils.createParseExpression(parseMethod, sourceField,
-          patternExpression, DSL.literal(group));
-      curEnv.define(new Symbol(Namespace.FIELD_NAME, group), expr.type());
-      context.getNamedParseExpressions().add(new NamedExpression(group, expr));
-    });
+    ParseUtils.getNamedGroupCandidates(parseMethod, pattern, arguments)
+        .forEach(
+            group -> {
+              ParseExpression expr =
+                  ParseUtils.createParseExpression(
+                      parseMethod, sourceField, patternExpression, DSL.literal(group));
+              curEnv.define(new Symbol(Namespace.FIELD_NAME, group), expr.type());
+              context.getNamedParseExpressions().add(new NamedExpression(group, expr));
+            });
     return child;
   }
 
-  /**
-   * Build {@link LogicalSort}.
-   */
+  /** Build {@link LogicalSort}. */
   @Override
   public LogicalPlan visitSort(Sort node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -473,8 +478,7 @@ public LogicalPlan visitSort(Sort node, AnalysisContext context) {
                   var analyzed = expressionAnalyzer.analyze(sortField.getField(), context);
                   if (analyzed == null) {
                     throw new UnsupportedOperationException(
-                        String.format("Invalid use of expression %s", sortField.getField())
-                    );
+                        String.format("Invalid use of expression %s", sortField.getField()));
                   }
                   Expression expression = optimizer.optimize(analyzed, context);
                   return ImmutablePair.of(analyzeSortOption(sortField.getFieldArgs()), expression);
@@ -483,9 +487,7 @@ public LogicalPlan visitSort(Sort node, AnalysisContext context) {
     return new LogicalSort(child, sortList);
   }
 
-  /**
-   * Build {@link LogicalDedupe}.
-   */
+  /** Build {@link LogicalDedupe}. */
   @Override
   public LogicalPlan visitDedupe(Dedupe node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -505,9 +507,7 @@ public LogicalPlan visitDedupe(Dedupe node, AnalysisContext context) {
         consecutive);
   }
 
-  /**
-   * Logical head is identical to {@link LogicalLimit}.
-   */
+  /** Logical head is identical to {@link LogicalLimit}. */
   public LogicalPlan visitHead(Head node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
     return new LogicalLimit(child, node.getSize(), node.getFrom());
@@ -518,16 +518,15 @@ public LogicalPlan visitValues(Values node, AnalysisContext context) {
     List> values = node.getValues();
     List> valueExprs = new ArrayList<>();
     for (List value : values) {
-      valueExprs.add(value.stream()
-          .map(val -> (LiteralExpression) expressionAnalyzer.analyze(val, context))
-          .collect(Collectors.toList()));
+      valueExprs.add(
+          value.stream()
+              .map(val -> (LiteralExpression) expressionAnalyzer.analyze(val, context))
+              .collect(Collectors.toList()));
     }
     return new LogicalValues(valueExprs);
   }
 
-  /**
-   * Build {@link LogicalMLCommons} for Kmeans command.
-   */
+  /** Build {@link LogicalMLCommons} for Kmeans command. */
   @Override
   public LogicalPlan visitKmeans(Kmeans node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -539,9 +538,7 @@ public LogicalPlan visitKmeans(Kmeans node, AnalysisContext context) {
     return new LogicalMLCommons(child, "kmeans", options);
   }
 
-  /**
-   * Build {@link LogicalAD} for AD command.
-   */
+  /** Build {@link LogicalAD} for AD command. */
   @Override
   public LogicalPlan visitAD(AD node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
@@ -554,21 +551,21 @@ public LogicalPlan visitAD(AD node, AnalysisContext context) {
       currentEnv.define(new Symbol(Namespace.FIELD_NAME, RCF_ANOMALOUS), ExprCoreType.BOOLEAN);
     } else {
       currentEnv.define(new Symbol(Namespace.FIELD_NAME, RCF_ANOMALY_GRADE), ExprCoreType.DOUBLE);
-      currentEnv.define(new Symbol(Namespace.FIELD_NAME,
-              (String) node.getArguments().get(TIME_FIELD).getValue()), ExprCoreType.TIMESTAMP);
+      currentEnv.define(
+          new Symbol(Namespace.FIELD_NAME, (String) node.getArguments().get(TIME_FIELD).getValue()),
+          ExprCoreType.TIMESTAMP);
     }
     return new LogicalAD(child, options);
   }
 
-  /**
-   * Build {@link LogicalML} for ml command.
-   */
+  /** Build {@link LogicalML} for ml command. */
   @Override
   public LogicalPlan visitML(ML node, AnalysisContext context) {
     LogicalPlan child = node.getChild().get(0).accept(this, context);
     TypeEnvironment currentEnv = context.peek();
     node.getOutputSchema(currentEnv).entrySet().stream()
-      .forEach(v -> currentEnv.define(new Symbol(Namespace.FIELD_NAME, v.getKey()), v.getValue()));
+        .forEach(
+            v -> currentEnv.define(new Symbol(Namespace.FIELD_NAME, v.getKey()), v.getValue()));
 
     return new LogicalML(child, node.getArguments());
   }
@@ -581,8 +578,9 @@ public LogicalPlan visitPaginate(Paginate paginate, AnalysisContext context) {
 
   @Override
   public LogicalPlan visitFetchCursor(FetchCursor cursor, AnalysisContext context) {
-    return new LogicalFetchCursor(cursor.getCursor(),
-      dataSourceService.getDataSource(DEFAULT_DATASOURCE_NAME).getStorageEngine());
+    return new LogicalFetchCursor(
+        cursor.getCursor(),
+        dataSourceService.getDataSource(DEFAULT_DATASOURCE_NAME).getStorageEngine());
   }
 
   @Override
@@ -591,13 +589,13 @@ public LogicalPlan visitCloseCursor(CloseCursor closeCursor, AnalysisContext con
   }
 
   /**
-   * The first argument is always "asc", others are optional.
-   * Given nullFirst argument, use its value. Otherwise just use DEFAULT_ASC/DESC.
+   * The first argument is always "asc", others are optional. Given nullFirst argument, use its
+   * value. Otherwise just use DEFAULT_ASC/DESC.
    */
   private SortOption analyzeSortOption(List fieldArgs) {
     Boolean asc = (Boolean) fieldArgs.get(0).getValue().getValue();
-    Optional nullFirst = fieldArgs.stream()
-        .filter(option -> "nullFirst".equals(option.getArgName())).findFirst();
+    Optional nullFirst =
+        fieldArgs.stream().filter(option -> "nullFirst".equals(option.getArgName())).findFirst();
 
     if (nullFirst.isPresent()) {
       Boolean isNullFirst = (Boolean) nullFirst.get().getValue().getValue();
diff --git a/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java b/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java
index 1bb8316907..01145dc7df 100644
--- a/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java
+++ b/core/src/main/java/org/opensearch/sql/analysis/DataSourceSchemaIdentifierNameResolver.java
@@ -7,13 +7,8 @@
 
 package org.opensearch.sql.analysis;
 
-import com.google.common.collect.ImmutableSet;
 import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.opensearch.sql.ast.expression.QualifiedName;
 import org.opensearch.sql.datasource.DataSourceService;
-import org.opensearch.sql.datasource.model.DataSourceMetadata;
 
 public class DataSourceSchemaIdentifierNameResolver {
 
@@ -29,19 +24,17 @@ public class DataSourceSchemaIdentifierNameResolver {
   private static final String DOT = ".";
 
   /**
-   * Data model for capturing dataSourceName, schema and identifier from
-   * fully qualifiedName. In the current state, it is used to capture
-   * DataSourceSchemaTable name and DataSourceSchemaFunction in case of table
-   * functions.
+   * Data model for capturing dataSourceName, schema and identifier from fully qualifiedName. In the
+   * current state, it is used to capture DataSourceSchemaTable name and DataSourceSchemaFunction in
+   * case of table functions.
    *
    * @param dataSourceService {@link DataSourceService}.
-   * @param parts           parts of qualifiedName.
+   * @param parts parts of qualifiedName.
    */
-  public DataSourceSchemaIdentifierNameResolver(DataSourceService dataSourceService,
-                                                List parts) {
+  public DataSourceSchemaIdentifierNameResolver(
+      DataSourceService dataSourceService, List parts) {
     this.dataSourceService = dataSourceService;
-    List remainingParts
-        = captureSchemaName(captureDataSourceName(parts));
+    List remainingParts = captureSchemaName(captureDataSourceName(parts));
     identifierName = String.join(DOT, remainingParts);
   }
 
@@ -57,7 +50,6 @@ public String getSchemaName() {
     return schemaName;
   }
 
-
   // Capture datasource name and return remaining parts(schema name and table name)
   // from the fully qualified name.
   private List captureDataSourceName(List parts) {
@@ -74,12 +66,11 @@ private List captureDataSourceName(List parts) {
   private List captureSchemaName(List parts) {
     if (parts.size() > 1
         && (DEFAULT_SCHEMA_NAME.equals(parts.get(0))
-        || INFORMATION_SCHEMA_NAME.contains(parts.get(0)))) {
+            || INFORMATION_SCHEMA_NAME.contains(parts.get(0)))) {
       schemaName = parts.get(0);
       return parts.subList(1, parts.size());
     } else {
       return parts;
     }
   }
-
 }
diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java
index 601e3e00cc..5a8d6fe976 100644
--- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java
+++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.analysis;
 
 import static org.opensearch.sql.ast.dsl.AstDSL.and;
@@ -49,7 +48,6 @@
 import org.opensearch.sql.ast.expression.When;
 import org.opensearch.sql.ast.expression.WindowFunction;
 import org.opensearch.sql.ast.expression.Xor;
-import org.opensearch.sql.common.antlr.SyntaxCheckException;
 import org.opensearch.sql.data.model.ExprValueUtils;
 import org.opensearch.sql.data.type.ExprCoreType;
 import org.opensearch.sql.data.type.ExprType;
@@ -78,19 +76,19 @@
  * Expression}.
  */
 public class ExpressionAnalyzer extends AbstractNodeVisitor {
-  @Getter
-  private final BuiltinFunctionRepository repository;
+  @Getter private final BuiltinFunctionRepository repository;
 
   @Override
   public Expression visitCast(Cast node, AnalysisContext context) {
     final Expression expression = node.getExpression().accept(this, context);
-    return (Expression) repository
-        .compile(context.getFunctionProperties(), node.convertFunctionName(),
+    return (Expression)
+        repository.compile(
+            context.getFunctionProperties(),
+            node.convertFunctionName(),
             Collections.singletonList(expression));
   }
 
-  public ExpressionAnalyzer(
-      BuiltinFunctionRepository repository) {
+  public ExpressionAnalyzer(BuiltinFunctionRepository repository) {
     this.repository = repository;
   }
 
@@ -113,8 +111,8 @@ public Expression visitEqualTo(EqualTo node, AnalysisContext context) {
 
   @Override
   public Expression visitLiteral(Literal node, AnalysisContext context) {
-    return DSL
-        .literal(ExprValueUtils.fromObjectValue(node.getValue(), node.getType().getCoreType()));
+    return DSL.literal(
+        ExprValueUtils.fromObjectValue(node.getValue(), node.getType().getCoreType()));
   }
 
   @Override
@@ -163,8 +161,12 @@ public Expression visitAggregateFunction(AggregateFunction node, AnalysisContext
       for (UnresolvedExpression arg : node.getArgList()) {
         builder.add(arg.accept(this, context));
       }
-      Aggregator aggregator = (Aggregator) repository.compile(
-          context.getFunctionProperties(), builtinFunctionName.get().getName(), builder.build());
+      Aggregator aggregator =
+          (Aggregator)
+              repository.compile(
+                  context.getFunctionProperties(),
+                  builtinFunctionName.get().getName(),
+                  builder.build());
       aggregator.distinct(node.getDistinct());
       if (node.condition() != null) {
         aggregator.condition(analyze(node.condition(), context));
@@ -177,8 +179,8 @@ public Expression visitAggregateFunction(AggregateFunction node, AnalysisContext
 
   @Override
   public Expression visitRelevanceFieldList(RelevanceFieldList node, AnalysisContext context) {
-    return new LiteralExpression(ExprValueUtils.tupleValue(
-        ImmutableMap.copyOf(node.getFieldList())));
+    return new LiteralExpression(
+        ExprValueUtils.tupleValue(ImmutableMap.copyOf(node.getFieldList())));
   }
 
   @Override
@@ -186,19 +188,19 @@ public Expression visitFunction(Function node, AnalysisContext context) {
     FunctionName functionName = FunctionName.of(node.getFuncName());
     List arguments =
         node.getFuncArgs().stream()
-            .map(unresolvedExpression -> {
-              var ret = analyze(unresolvedExpression, context);
-              if (ret == null) {
-                throw new UnsupportedOperationException(
-                    String.format("Invalid use of expression %s", unresolvedExpression)
-                );
-              } else {
-                return ret;
-              }
-            })
+            .map(
+                unresolvedExpression -> {
+                  var ret = analyze(unresolvedExpression, context);
+                  if (ret == null) {
+                    throw new UnsupportedOperationException(
+                        String.format("Invalid use of expression %s", unresolvedExpression));
+                  } else {
+                    return ret;
+                  }
+                })
             .collect(Collectors.toList());
-    return (Expression) repository.compile(context.getFunctionProperties(),
-        functionName, arguments);
+    return (Expression)
+        repository.compile(context.getFunctionProperties(), functionName, arguments);
   }
 
   @SuppressWarnings("unchecked")
@@ -220,18 +222,20 @@ public Expression visitHighlightFunction(HighlightFunction node, AnalysisContext
 
   /**
    * visitScoreFunction removes the score function from the AST and replaces it with the child
-   * relevance function node. If the optional boost variable is provided, the boost argument
-   * of the relevance function is combined.
+   * relevance function node. If the optional boost variable is provided, the boost argument of the
+   * relevance function is combined.
    *
-   * @param node    score function node
+   * @param node score function node
    * @param context analysis context for the query
    * @return resolved relevance function
    */
   public Expression visitScoreFunction(ScoreFunction node, AnalysisContext context) {
     Literal boostArg = node.getRelevanceFieldWeight();
     if (!boostArg.getType().equals(DataType.DOUBLE)) {
-      throw new SemanticCheckException(String.format("Expected boost type '%s' but got '%s'",
-          DataType.DOUBLE.name(), boostArg.getType().name()));
+      throw new SemanticCheckException(
+          String.format(
+              "Expected boost type '%s' but got '%s'",
+              DataType.DOUBLE.name(), boostArg.getType().name()));
     }
     Double thisBoostValue = ((Double) boostArg.getValue());
 
@@ -249,10 +253,9 @@ public Expression visitScoreFunction(ScoreFunction node, AnalysisContext context
         Literal boostArgLiteral = (Literal) ((UnresolvedArgument) expr).getValue();
         Double boostValue =
             Double.parseDouble((String) boostArgLiteral.getValue()) * thisBoostValue;
-        UnresolvedArgument newBoostArg = new UnresolvedArgument(
-                argumentName,
-                new Literal(boostValue.toString(), DataType.STRING)
-        );
+        UnresolvedArgument newBoostArg =
+            new UnresolvedArgument(
+                argumentName, new Literal(boostValue.toString(), DataType.STRING));
         updatedFuncArgs.add(newBoostArg);
       } else {
         updatedFuncArgs.add(expr);
@@ -261,18 +264,18 @@ public Expression visitScoreFunction(ScoreFunction node, AnalysisContext context
 
     // since nothing was found, add an argument
     if (!doesFunctionContainBoostArgument) {
-      UnresolvedArgument newBoostArg = new UnresolvedArgument(
+      UnresolvedArgument newBoostArg =
+          new UnresolvedArgument(
               "boost", new Literal(Double.toString(thisBoostValue), DataType.STRING));
       updatedFuncArgs.add(newBoostArg);
     }
 
     // create a new function expression with boost argument and resolve it
-    Function updatedRelevanceQueryUnresolvedExpr = new Function(
-            relevanceQueryUnresolvedExpr.getFuncName(),
-            updatedFuncArgs);
+    Function updatedRelevanceQueryUnresolvedExpr =
+        new Function(relevanceQueryUnresolvedExpr.getFuncName(), updatedFuncArgs);
     OpenSearchFunctions.OpenSearchFunction relevanceQueryExpr =
-            (OpenSearchFunctions.OpenSearchFunction) updatedRelevanceQueryUnresolvedExpr
-                    .accept(this, context);
+        (OpenSearchFunctions.OpenSearchFunction)
+            updatedRelevanceQueryUnresolvedExpr.accept(this, context);
     relevanceQueryExpr.setScoreTracked(true);
     return relevanceQueryExpr;
   }
@@ -301,16 +304,16 @@ public Expression visitCompare(Compare node, AnalysisContext context) {
     Expression left = analyze(node.getLeft(), context);
     Expression right = analyze(node.getRight(), context);
     return (Expression)
-        repository.compile(context.getFunctionProperties(),
-            functionName, Arrays.asList(left, right));
+        repository.compile(
+            context.getFunctionProperties(), functionName, Arrays.asList(left, right));
   }
 
   @Override
   public Expression visitBetween(Between node, AnalysisContext context) {
     return and(
-        compare(">=", node.getValue(), node.getLowerBound()),
-        compare("<=", node.getValue(), node.getUpperBound())
-    ).accept(this, context);
+            compare(">=", node.getValue(), node.getLowerBound()),
+            compare("<=", node.getValue(), node.getUpperBound()))
+        .accept(this, context);
   }
 
   @Override
@@ -321,16 +324,18 @@ public Expression visitCase(Case node, AnalysisContext context) {
         whens.add((WhenClause) analyze(when, context));
       } else {
         // Merge case value and condition (compare value) into a single equal condition
-        whens.add((WhenClause) analyze(
-            new When(
-                new Function("=", Arrays.asList(node.getCaseValue(), when.getCondition())),
-                when.getResult()
-            ), context));
+        whens.add(
+            (WhenClause)
+                analyze(
+                    new When(
+                        new Function("=", Arrays.asList(node.getCaseValue(), when.getCondition())),
+                        when.getResult()),
+                    context));
       }
     }
 
-    Expression defaultResult = (node.getElseClause() == null)
-        ? null : analyze(node.getElseClause(), context);
+    Expression defaultResult =
+        (node.getElseClause() == null) ? null : analyze(node.getElseClause(), context);
     CaseClause caseClause = new CaseClause(whens, defaultResult);
 
     // To make this simple, require all result type same regardless of implicit convert
@@ -346,8 +351,7 @@ public Expression visitCase(Case node, AnalysisContext context) {
   @Override
   public Expression visitWhen(When node, AnalysisContext context) {
     return new WhenClause(
-        analyze(node.getCondition(), context),
-        analyze(node.getResult(), context));
+        analyze(node.getCondition(), context), analyze(node.getResult(), context));
   }
 
   @Override
@@ -371,16 +375,13 @@ public Expression visitQualifiedName(QualifiedName node, AnalysisContext context
     // check for reserved words in the identifier
     for (String part : node.getParts()) {
       for (TypeEnvironment typeEnv = context.peek();
-           typeEnv != null;
-           typeEnv = typeEnv.getParent()) {
-        Optional exprType = typeEnv.getReservedSymbolTable().lookup(
-            new Symbol(Namespace.FIELD_NAME, part));
+          typeEnv != null;
+          typeEnv = typeEnv.getParent()) {
+        Optional exprType =
+            Optional.ofNullable(typeEnv.lookupAllFields(Namespace.HIDDEN_FIELD_NAME).get(part));
         if (exprType.isPresent()) {
           return visitMetadata(
-              qualifierAnalyzer.unqualified(node),
-              (ExprCoreType) exprType.get(),
-              context
-          );
+              qualifierAnalyzer.unqualified(node), (ExprCoreType) exprType.get(), context);
         }
       }
     }
@@ -401,15 +402,15 @@ public Expression visitUnresolvedArgument(UnresolvedArgument node, AnalysisConte
   }
 
   /**
-   * If QualifiedName is actually a reserved metadata field, return the expr type associated
-   * with the metadata field.
-   * @param ident   metadata field name
+   * If QualifiedName is actually a reserved metadata field, return the expr type associated with
+   * the metadata field.
+   *
+   * @param ident metadata field name
    * @param context analysis context
    * @return DSL reference
    */
-  private Expression visitMetadata(String ident,
-                                   ExprCoreType exprCoreType,
-                                   AnalysisContext context) {
+  private Expression visitMetadata(
+      String ident, ExprCoreType exprCoreType, AnalysisContext context) {
     return DSL.ref(ident, exprCoreType);
   }
 
@@ -422,8 +423,8 @@ private Expression visitIdentifier(String ident, AnalysisContext context) {
     }
 
     TypeEnvironment typeEnv = context.peek();
-    ReferenceExpression ref = DSL.ref(ident,
-        typeEnv.resolve(new Symbol(Namespace.FIELD_NAME, ident)));
+    ReferenceExpression ref =
+        DSL.ref(ident, typeEnv.resolve(new Symbol(Namespace.FIELD_NAME, ident)));
 
     return ref;
   }
diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java
index eaf5c4abca..398f848f16 100644
--- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java
+++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizer.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.analysis;
 
 import java.util.HashMap;
@@ -26,25 +25,25 @@
 import org.opensearch.sql.planner.logical.LogicalWindow;
 
 /**
- * The optimizer used to replace the expression referred in the SelectClause
- * e.g. The query SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name).
- * will be translated the AST
- * Project[abs(age), sub(sum(age), avg(age))
- *  Agg(agg=[sum(age), avg(age)], group=[abs(age)]]
- *   Relation
- * The sum(age) and avg(age) in the Project could be replace by the analyzed reference, the
- * LogicalPlan should be
- * LogicalProject[Ref("abs(age)"), sub(Ref("sum(age)"), Ref("avg(age)"))
- *  LogicalAgg(agg=[sum(age), avg(age)], group=[abs(age)]]
- *   LogicalRelation
+ * The optimizer used to replace the expression referred in the SelectClause
e.g. The query + * SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name).
+ * will be translated the AST
+ * Project[abs(age), sub(sum(age), avg(age))
+ * &ensp Agg(agg=[sum(age), avg(age)], group=[abs(age)]]
+ * &emsp Relation
+ * The sum(age) and avg(age) in the Project could be replaced by the analyzed reference, the + * LogicalPlan should be
+ * LogicalProject[Ref("abs(age)"), sub(Ref("sum(age)"), Ref("avg(age)"))
+ * &ensp LogicalAgg(agg=[sum(age), avg(age)], group=[abs(age)]]
+ * &emsp LogicalRelation */ public class ExpressionReferenceOptimizer extends ExpressionNodeVisitor { private final BuiltinFunctionRepository repository; /** - * The map of expression and it's reference. - * For example, The NamedAggregator should produce the map of Aggregator to Ref(name) + * The map of expression and it's reference. For example, The NamedAggregator should produce the + * map of Aggregator to Ref(name) */ private final Map expressionMap = new HashMap<>(); @@ -69,17 +68,16 @@ public Expression visitFunction(FunctionExpression node, AnalysisContext context return expressionMap.get(node); } else { final List args = - node.getArguments().stream().map(expr -> expr.accept(this, context)) + node.getArguments().stream() + .map(expr -> expr.accept(this, context)) .collect(Collectors.toList()); - Expression optimizedFunctionExpression = (Expression) repository.compile( - context.getFunctionProperties(), - node.getFunctionName(), - args - ); + Expression optimizedFunctionExpression = + (Expression) + repository.compile(context.getFunctionProperties(), node.getFunctionName(), args); // Propagate scoreTracked for OpenSearch functions if (optimizedFunctionExpression instanceof OpenSearchFunctions.OpenSearchFunction) { - ((OpenSearchFunctions.OpenSearchFunction) optimizedFunctionExpression).setScoreTracked( - ((OpenSearchFunctions.OpenSearchFunction)node).isScoreTracked()); + ((OpenSearchFunctions.OpenSearchFunction) optimizedFunctionExpression) + .setScoreTracked(((OpenSearchFunctions.OpenSearchFunction) node).isScoreTracked()); } return optimizedFunctionExpression; } @@ -98,19 +96,17 @@ public Expression visitNamed(NamedExpression node, AnalysisContext context) { return node.getDelegated().accept(this, context); } - /** - * Implement this because Case/When is not registered in function repository. - */ + /** Implement this because Case/When is not registered in function repository. */ @Override public Expression visitCase(CaseClause node, AnalysisContext context) { if (expressionMap.containsKey(node)) { return expressionMap.get(node); } - List whenClauses = node.getWhenClauses() - .stream() - .map(expr -> (WhenClause) expr.accept(this, context)) - .collect(Collectors.toList()); + List whenClauses = + node.getWhenClauses().stream() + .map(expr -> (WhenClause) expr.accept(this, context)) + .collect(Collectors.toList()); Expression defaultResult = null; if (node.getDefaultResult() != null) { defaultResult = node.getDefaultResult().accept(this, context); @@ -121,14 +117,10 @@ public Expression visitCase(CaseClause node, AnalysisContext context) { @Override public Expression visitWhen(WhenClause node, AnalysisContext context) { return new WhenClause( - node.getCondition().accept(this, context), - node.getResult().accept(this, context)); + node.getCondition().accept(this, context), node.getResult().accept(this, context)); } - - /** - * Expression Map Builder. - */ + /** Expression Map Builder. */ class ExpressionMapBuilder extends LogicalPlanNodeVisitor { @Override @@ -140,20 +132,27 @@ public Void visitNode(LogicalPlan plan, Void context) { @Override public Void visitAggregation(LogicalAggregation plan, Void context) { // Create the mapping for all the aggregator. 
- plan.getAggregatorList().forEach(namedAggregator -> expressionMap - .put(namedAggregator.getDelegated(), - new ReferenceExpression(namedAggregator.getName(), namedAggregator.type()))); + plan.getAggregatorList() + .forEach( + namedAggregator -> + expressionMap.put( + namedAggregator.getDelegated(), + new ReferenceExpression(namedAggregator.getName(), namedAggregator.type()))); // Create the mapping for all the group by. - plan.getGroupByList().forEach(groupBy -> expressionMap - .put(groupBy.getDelegated(), - new ReferenceExpression(groupBy.getNameOrAlias(), groupBy.type()))); + plan.getGroupByList() + .forEach( + groupBy -> + expressionMap.put( + groupBy.getDelegated(), + new ReferenceExpression(groupBy.getNameOrAlias(), groupBy.type()))); return null; } @Override public Void visitWindow(LogicalWindow plan, Void context) { Expression windowFunc = plan.getWindowFunction(); - expressionMap.put(windowFunc, + expressionMap.put( + windowFunc, new ReferenceExpression(((NamedExpression) windowFunc).getName(), windowFunc.type())); return visitNode(plan, context); } diff --git a/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java index 0a15c6bac8..386c6e9d9f 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/HighlightAnalyzer.java @@ -14,10 +14,7 @@ import org.opensearch.sql.planner.logical.LogicalHighlight; import org.opensearch.sql.planner.logical.LogicalPlan; -/** - * Analyze the highlight in the {@link AnalysisContext} to construct the {@link - * LogicalPlan}. - */ +/** Analyze the highlight in the {@link AnalysisContext} to construct the {@link LogicalPlan}. */ @RequiredArgsConstructor public class HighlightAnalyzer extends AbstractNodeVisitor { private final ExpressionAnalyzer expressionAnalyzer; diff --git a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java index 1d318c5588..43bd411b42 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java @@ -3,33 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import lombok.RequiredArgsConstructor; import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Alias; import org.opensearch.sql.ast.expression.QualifiedName; -import org.opensearch.sql.ast.expression.Span; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.NamedExpression; -import org.opensearch.sql.expression.span.SpanExpression; /** - * Analyze the Alias node in the {@link AnalysisContext} to construct the list of - * {@link NamedExpression}. + * Analyze the Alias node in the {@link AnalysisContext} to construct the list of {@link + * NamedExpression}. */ @RequiredArgsConstructor -public class NamedExpressionAnalyzer extends - AbstractNodeVisitor { +public class NamedExpressionAnalyzer extends AbstractNodeVisitor { private final ExpressionAnalyzer expressionAnalyzer; - /** - * Analyze Select fields. - */ - public NamedExpression analyze(UnresolvedExpression expression, - AnalysisContext analysisContext) { + /** Analyze Select fields. 
*/ + public NamedExpression analyze(UnresolvedExpression expression, AnalysisContext analysisContext) { return expression.accept(this, analysisContext); } diff --git a/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java index f050824557..ef8f142801 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/NestedAnalyzer.java @@ -27,8 +27,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; /** - * Analyze the Nested Function in the {@link AnalysisContext} to construct the {@link - * LogicalPlan}. + * Analyze the Nested Function in the {@link AnalysisContext} to construct the {@link LogicalPlan}. */ @RequiredArgsConstructor public class NestedAnalyzer extends AbstractNodeVisitor { @@ -52,15 +51,15 @@ public LogicalPlan visitNestedAllTupleFields(NestedAllTupleFields node, Analysis for (NamedExpression namedExpr : namedExpressions) { if (isNestedFunction(namedExpr.getDelegated())) { ReferenceExpression field = - (ReferenceExpression) ((FunctionExpression) namedExpr.getDelegated()) - .getArguments().get(0); + (ReferenceExpression) + ((FunctionExpression) namedExpr.getDelegated()).getArguments().get(0); // If path is same as NestedAllTupleFields path - if (field.getAttr().substring(0, field.getAttr().lastIndexOf(".")) + if (field + .getAttr() + .substring(0, field.getAttr().lastIndexOf(".")) .equalsIgnoreCase(node.getPath())) { - args.add(Map.of( - "field", field, - "path", new ReferenceExpression(node.getPath(), STRING))); + args.add(Map.of("field", field, "path", new ReferenceExpression(node.getPath(), STRING))); } } } @@ -75,20 +74,24 @@ public LogicalPlan visitFunction(Function node, AnalysisContext context) { List expressions = node.getFuncArgs(); validateArgs(expressions); ReferenceExpression nestedField = - (ReferenceExpression)expressionAnalyzer.analyze(expressions.get(0), context); + (ReferenceExpression) expressionAnalyzer.analyze(expressions.get(0), context); Map args; // Path parameter is supplied if (expressions.size() == 2) { - args = Map.of( - "field", nestedField, - "path", (ReferenceExpression)expressionAnalyzer.analyze(expressions.get(1), context) - ); + args = + Map.of( + "field", + nestedField, + "path", + (ReferenceExpression) expressionAnalyzer.analyze(expressions.get(1), context)); } else { - args = Map.of( - "field", (ReferenceExpression)expressionAnalyzer.analyze(expressions.get(0), context), - "path", generatePath(nestedField.toString()) - ); + args = + Map.of( + "field", + (ReferenceExpression) expressionAnalyzer.analyze(expressions.get(0), context), + "path", + generatePath(nestedField.toString())); } return mergeChildIfLogicalNested(new ArrayList<>(Arrays.asList(args))); @@ -97,8 +100,9 @@ public LogicalPlan visitFunction(Function node, AnalysisContext context) { } /** - * NestedAnalyzer visits all functions in SELECT clause, creates logical plans for each and - * merges them. This is to avoid another merge rule in LogicalPlanOptimizer:create(). + * NestedAnalyzer visits all functions in SELECT clause, creates logical plans for each and merges + * them. This is to avoid another merge rule in LogicalPlanOptimizer:create(). + * * @param args field and path params to add to logical plan. * @return child of logical nested with added args, or new LogicalNested. 
*/ @@ -113,34 +117,33 @@ private LogicalPlan mergeChildIfLogicalNested(List args) { if (args.size() < 1 || args.size() > 2) { throw new IllegalArgumentException( - "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)" - ); + "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)"); } for (int i = 0; i < args.size(); i++) { if (!(args.get(i) instanceof QualifiedName)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", args.get(i).toString()) - ); + String.format("Illegal nested field name: %s", args.get(i).toString())); } - if (i == 0 && ((QualifiedName)args.get(i)).getParts().size() < 2) { + if (i == 0 && ((QualifiedName) args.get(i)).getParts().size() < 2) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", args.get(i).toString()) - ); + String.format("Illegal nested field name: %s", args.get(i).toString())); } } } /** * Generate nested path dynamically. Assumes at least one level of nesting in supplied string. + * * @param field : Nested field to generate path of. * @return : Path of field derived from last level of nesting. */ @@ -150,12 +153,15 @@ public static ReferenceExpression generatePath(String field) { /** * Check if supplied expression is a nested function. + * * @param expr Expression checking if is nested function. * @return True if expression is a nested function. */ public static Boolean isNestedFunction(Expression expr) { return (expr instanceof FunctionExpression - && ((FunctionExpression) expr).getFunctionName().getFunctionName() - .equalsIgnoreCase(BuiltinFunctionName.NESTED.name())); + && ((FunctionExpression) expr) + .getFunctionName() + .getFunctionName() + .equalsIgnoreCase(BuiltinFunctionName.NESTED.name())); } } diff --git a/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java index d1e31d0079..27dd6a2243 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/QualifierAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import java.util.Arrays; @@ -15,9 +14,7 @@ import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Analyzer that analyzes qualifier(s) in a full field name. - */ +/** Analyzer that analyzes qualifier(s) in a full field name. */ @RequiredArgsConstructor public class QualifierAnalyzer { @@ -28,11 +25,11 @@ public String unqualified(String... parts) { } /** - * Get unqualified name if its qualifier symbol found is in index namespace - * on type environment. Unqualified name means name with qualifier removed. - * For example, unqualified name of "accounts.age" or "acc.age" is "age". + * Get unqualified name if its qualifier symbol found is in index namespace on type environment. + * Unqualified name means name with qualifier removed. For example, unqualified name of + * "accounts.age" or "acc.age" is "age". * - * @return unqualified name if criteria met above, otherwise original name + * @return unqualified name if criteria met above, otherwise original name */ public String unqualified(QualifiedName fullName) { return isQualifierIndexOrAlias(fullName) ? 
fullName.rest().toString() : fullName.toString(); @@ -66,10 +63,11 @@ private void resolveQualifierSymbol(QualifiedName fullName, String qualifier) { } catch (SemanticCheckException e) { // Throw syntax check intentionally to indicate fall back to old engine. // Need change to semantic check exception in future. - throw new SyntaxCheckException(String.format( - "The qualifier [%s] of qualified name [%s] must be an field name, index name or its " - + "alias", qualifier, fullName)); + throw new SyntaxCheckException( + String.format( + "The qualifier [%s] of qualified name [%s] must be an field name, index name or its " + + "alias", + qualifier, fullName)); } } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java index 734f37378b..5e46cfa629 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/SelectExpressionAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import com.google.common.collect.ImmutableList; @@ -30,23 +29,21 @@ import org.opensearch.sql.expression.ReferenceExpression; /** - * Analyze the select list in the {@link AnalysisContext} to construct the list of - * {@link NamedExpression}. + * Analyze the select list in the {@link AnalysisContext} to construct the list of {@link + * NamedExpression}. */ @RequiredArgsConstructor public class SelectExpressionAnalyzer - extends - AbstractNodeVisitor, AnalysisContext> { + extends AbstractNodeVisitor, AnalysisContext> { private final ExpressionAnalyzer expressionAnalyzer; private ExpressionReferenceOptimizer optimizer; - /** - * Analyze Select fields. - */ - public List analyze(List selectList, - AnalysisContext analysisContext, - ExpressionReferenceOptimizer optimizer) { + /** Analyze Select fields. */ + public List analyze( + List selectList, + AnalysisContext analysisContext, + ExpressionReferenceOptimizer optimizer) { this.optimizer = optimizer; ImmutableList.Builder builder = new ImmutableList.Builder<>(); for (UnresolvedExpression unresolvedExpression : selectList) { @@ -68,50 +65,49 @@ public List visitAlias(Alias node, AnalysisContext context) { } Expression expr = referenceIfSymbolDefined(node, context); - return Collections.singletonList(DSL.named( - unqualifiedNameIfFieldOnly(node, context), - expr, - node.getAlias())); + return Collections.singletonList( + DSL.named(unqualifiedNameIfFieldOnly(node, context), expr, node.getAlias())); } /** * The Alias could be - * 1. SELECT name, AVG(age) FROM s BY name -> - * Project(Alias("name", expr), Alias("AVG(age)", aggExpr)) - * Agg(Alias("AVG(age)", aggExpr)) - * 2. SELECT length(name), AVG(age) FROM s BY length(name) - * Project(Alias("name", expr), Alias("AVG(age)", aggExpr)) - * Agg(Alias("AVG(age)", aggExpr)) - * 3. SELECT length(name) as l, AVG(age) FROM s BY l - * Project(Alias("name", expr, l), Alias("AVG(age)", aggExpr)) - * Agg(Alias("AVG(age)", aggExpr), Alias("length(name)", groupExpr)) + * + *
+   *
+   * 1. SELECT name, AVG(age) FROM s BY name -> Project(Alias("name", expr), Alias("AVG(age)",
+   * aggExpr)) Agg(Alias("AVG(age)", aggExpr))
+   * 2. SELECT length(name), AVG(age) FROM s BY length(name) Project(Alias("name", expr),
+   * Alias("AVG(age)", aggExpr)) Agg(Alias("AVG(age)", aggExpr))
+   * 3. SELECT length(name) as l, AVG(age) FROM s BY l Project(Alias("name", expr, l),
+   * Alias("AVG(age)", aggExpr)) Agg(Alias("AVG(age)", aggExpr), Alias("length(name)",
+   * groupExpr))
*/ - private Expression referenceIfSymbolDefined(Alias expr, - AnalysisContext context) { + private Expression referenceIfSymbolDefined(Alias expr, AnalysisContext context) { UnresolvedExpression delegatedExpr = expr.getDelegated(); // Pass named expression because expression like window function loses full name // (OVER clause) and thus depends on name in alias to be replaced correctly return optimizer.optimize( DSL.named( - expr.getName(), - delegatedExpr.accept(expressionAnalyzer, context), - expr.getAlias()), + expr.getName(), delegatedExpr.accept(expressionAnalyzer, context), expr.getAlias()), context); } @Override - public List visitAllFields(AllFields node, - AnalysisContext context) { + public List visitAllFields(AllFields node, AnalysisContext context) { TypeEnvironment environment = context.peek(); Map lookupAllFields = environment.lookupAllFields(Namespace.FIELD_NAME); - return lookupAllFields.entrySet().stream().map(entry -> DSL.named(entry.getKey(), - new ReferenceExpression(entry.getKey(), entry.getValue()))).collect(Collectors.toList()); + return lookupAllFields.entrySet().stream() + .map( + entry -> + DSL.named( + entry.getKey(), new ReferenceExpression(entry.getKey(), entry.getValue()))) + .collect(Collectors.toList()); } @Override - public List visitNestedAllTupleFields(NestedAllTupleFields node, - AnalysisContext context) { + public List visitNestedAllTupleFields( + NestedAllTupleFields node, AnalysisContext context) { TypeEnvironment environment = context.peek(); Map lookupAllTupleFields = environment.lookupAllTupleFields(Namespace.FIELD_NAME); @@ -121,25 +117,25 @@ public List visitNestedAllTupleFields(NestedAllTupleFields node Pattern p = Pattern.compile(node.getPath() + "\\.[^\\.]+$"); return lookupAllTupleFields.entrySet().stream() .filter(field -> p.matcher(field.getKey()).find()) - .map(entry -> { - Expression nestedFunc = new Function( - "nested", - List.of( - new QualifiedName(List.of(entry.getKey().split("\\.")))) - ).accept(expressionAnalyzer, context); - return DSL.named("nested(" + entry.getKey() + ")", nestedFunc); - }) + .map( + entry -> { + Expression nestedFunc = + new Function( + "nested", + List.of(new QualifiedName(List.of(entry.getKey().split("\\."))))) + .accept(expressionAnalyzer, context); + return DSL.named("nested(" + entry.getKey() + ")", nestedFunc); + }) .collect(Collectors.toList()); } /** - * Get unqualified name if select item is just a field. For example, suppose an index - * named "accounts", return "age" for "SELECT accounts.age". But do nothing for expression - * in "SELECT ABS(accounts.age)". - * Note that an assumption is made implicitly that original name field in Alias must be - * the same as the values in QualifiedName. This is true because AST builder does this. - * Otherwise, what unqualified() returns will override Alias's name as NamedExpression's name - * even though the QualifiedName doesn't have qualifier. + * Get unqualified name if select item is just a field. For example, suppose an index named + * "accounts", return "age" for "SELECT accounts.age". But do nothing for expression in "SELECT + * ABS(accounts.age)". Note that an assumption is made implicitly that original name field in + * Alias must be the same as the values in QualifiedName. This is true because AST builder does + * this. Otherwise, what unqualified() returns will override Alias's name as NamedExpression's + * name even though the QualifiedName doesn't have qualifier. 
*/ private String unqualifiedNameIfFieldOnly(Alias node, AnalysisContext context) { UnresolvedExpression selectItem = node.getDelegated(); @@ -149,5 +145,4 @@ private String unqualifiedNameIfFieldOnly(Alias node, AnalysisContext context) { } return node.getName(); } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java index 17d203f66f..18693a63e6 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java +++ b/core/src/main/java/org/opensearch/sql/analysis/TypeEnvironment.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.analysis.symbol.Namespace.FIELD_NAME; @@ -21,17 +20,11 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.env.Environment; -/** - * The definition of Type Environment. - */ +/** The definition of Type Environment. */ public class TypeEnvironment implements Environment { - @Getter - private final TypeEnvironment parent; + @Getter private final TypeEnvironment parent; private final SymbolTable symbolTable; - @Getter - private final SymbolTable reservedSymbolTable; - /** * Constructor with empty symbol tables. * @@ -40,7 +33,6 @@ public class TypeEnvironment implements Environment { public TypeEnvironment(TypeEnvironment parent) { this.parent = parent; this.symbolTable = new SymbolTable(); - this.reservedSymbolTable = new SymbolTable(); } /** @@ -52,7 +44,6 @@ public TypeEnvironment(TypeEnvironment parent) { public TypeEnvironment(TypeEnvironment parent, SymbolTable symbolTable) { this.parent = parent; this.symbolTable = symbolTable; - this.reservedSymbolTable = new SymbolTable(); } /** @@ -69,15 +60,14 @@ public ExprType resolve(Symbol symbol) { return typeOptional.get(); } } - throw new SemanticCheckException( - String.format("can't resolve %s in type env", symbol)); + throw new SemanticCheckException(String.format("can't resolve %s in type env", symbol)); } /** * Resolve all fields in the current environment. * - * @param namespace a namespace - * @return all symbols in the namespace + * @param namespace a namespace + * @return all symbols in the namespace */ public Map lookupAllFields(Namespace namespace) { Map result = new LinkedHashMap<>(); @@ -87,8 +77,9 @@ public Map lookupAllFields(Namespace namespace) { /** * Resolve all fields in the current environment. - * @param namespace a namespace - * @return all symbols in the namespace + * + * @param namespace a namespace + * @return all symbols in the namespace */ public Map lookupAllTupleFields(Namespace namespace) { Map result = new LinkedHashMap<>(); @@ -100,7 +91,7 @@ public Map lookupAllTupleFields(Namespace namespace) { * Define symbol with the type. * * @param symbol symbol to define - * @param type type + * @param type type */ public void define(Symbol symbol, ExprType type) { symbolTable.store(symbol, type); @@ -119,22 +110,13 @@ public void remove(Symbol symbol) { symbolTable.remove(symbol); } - /** - * Remove ref. - */ + /** Remove ref. */ public void remove(ReferenceExpression ref) { remove(new Symbol(FIELD_NAME, ref.getAttr())); } - /** - * Clear all fields in the current environment. - */ + /** Clear all fields in the current environment. 
*/ public void clearAllFields() { - lookupAllFields(FIELD_NAME).keySet().forEach( - v -> remove(new Symbol(Namespace.FIELD_NAME, v))); - } - - public void addReservedWord(Symbol symbol, ExprType type) { - reservedSymbolTable.store(symbol, type); + lookupAllFields(FIELD_NAME).keySet().forEach(v -> remove(new Symbol(Namespace.FIELD_NAME, v))); } } diff --git a/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java index 3abcf9e140..c4229e4664 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/WindowExpressionAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.ast.tree.Sort.SortOption.DEFAULT_ASC; @@ -29,28 +28,25 @@ import org.opensearch.sql.planner.logical.LogicalWindow; /** - * Window expression analyzer that analyzes window function expression in expression list - * in project operator. + * Window expression analyzer that analyzes window function expression in expression list in project + * operator. */ @RequiredArgsConstructor public class WindowExpressionAnalyzer extends AbstractNodeVisitor { - /** - * Expression analyzer. - */ + /** Expression analyzer. */ private final ExpressionAnalyzer expressionAnalyzer; - /** - * Child node to be wrapped by a new window operator. - */ + /** Child node to be wrapped by a new window operator. */ private final LogicalPlan child; /** - * Analyze the given project item and return window operator (with child node inside) - * if the given project item is a window function. - * @param projectItem project item - * @param context analysis context - * @return window operator or original child if not windowed + * Analyze the given project item and return window operator (with child node inside) if the given + * project item is a window function. 
+ * + * @param projectItem project item + * @param context analysis context + * @return window operator or original child if not windowed */ public LogicalPlan analyze(UnresolvedExpression projectItem, AnalysisContext context) { LogicalPlan window = projectItem.accept(this, context); @@ -77,26 +73,24 @@ public LogicalPlan visitAlias(Alias node, AnalysisContext context) { return new LogicalWindow(child, namedWindowFunction, windowDefinition); } return new LogicalWindow( - new LogicalSort(child, allSortItems), - namedWindowFunction, - windowDefinition); + new LogicalSort(child, allSortItems), namedWindowFunction, windowDefinition); } private List analyzePartitionList(WindowFunction node, AnalysisContext context) { - return node.getPartitionByList() - .stream() - .map(expr -> expressionAnalyzer.analyze(expr, context)) - .collect(Collectors.toList()); + return node.getPartitionByList().stream() + .map(expr -> expressionAnalyzer.analyze(expr, context)) + .collect(Collectors.toList()); } - private List> analyzeSortList(WindowFunction node, - AnalysisContext context) { - return node.getSortList() - .stream() - .map(pair -> ImmutablePair - .of(analyzeSortOption(pair.getLeft()), - expressionAnalyzer.analyze(pair.getRight(), context))) - .collect(Collectors.toList()); + private List> analyzeSortList( + WindowFunction node, AnalysisContext context) { + return node.getSortList().stream() + .map( + pair -> + ImmutablePair.of( + analyzeSortOption(pair.getLeft()), + expressionAnalyzer.analyze(pair.getRight(), context))) + .collect(Collectors.toList()); } /** @@ -107,9 +101,6 @@ private SortOption analyzeSortOption(SortOption option) { if (option.getNullOrder() == null) { return (option.getSortOrder() == DESC) ? DEFAULT_DESC : DEFAULT_ASC; } - return new SortOption( - (option.getSortOrder() == DESC) ? DESC : ASC, - option.getNullOrder()); + return new SortOption((option.getSortOrder() == DESC) ? DESC : ASC, option.getNullOrder()); } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java index b5203033a8..e8a7454014 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/Namespace.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; -/** - * Namespace of symbol to avoid naming conflict. - */ +/** Namespace of symbol to avoid naming conflict. */ public enum Namespace { - INDEX_NAME("Index"), FIELD_NAME("Field"), + HIDDEN_FIELD_NAME("HiddenField"), FUNCTION_NAME("Function"); private final String name; @@ -20,5 +17,4 @@ public enum Namespace { Namespace(String name) { this.name = name; } - } diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java index 8cc9505710..98fa4b3569 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/Symbol.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; -/** - * Symbol in the scope. - */ +/** Symbol in the scope. 
*/ @ToString @Getter @RequiredArgsConstructor diff --git a/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java b/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java index be7435c288..8bb6824a63 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java +++ b/core/src/main/java/org/opensearch/sql/analysis/symbol/SymbolTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; import static java.util.Collections.emptyMap; @@ -17,21 +16,16 @@ import java.util.TreeMap; import org.opensearch.sql.data.type.ExprType; -/** - * Symbol table for symbol definition and resolution. - */ +/** Symbol table for symbol definition and resolution. */ public class SymbolTable { - /** - * Two-dimension hash table to manage symbols with type in different namespace. - */ + /** Two-dimension hash table to manage symbols with type in different namespace. */ private Map> tableByNamespace = new EnumMap<>(Namespace.class); /** - * Two-dimension hash table to manage symbols with type in different namespace. - * Comparing with tableByNamespace, orderedTable use the LinkedHashMap to keep the order of - * symbol. + * Two-dimension hash table to manage symbols with type in different namespace. Comparing with + * tableByNamespace, orderedTable use the LinkedHashMap to keep the order of symbol. */ private Map> orderedTable = new EnumMap<>(Namespace.class); @@ -40,38 +34,32 @@ public class SymbolTable { * Store symbol with the type. Create new map for namespace for the first time. * * @param symbol symbol to define - * @param type symbol type + * @param type symbol type */ public void store(Symbol symbol, ExprType type) { - tableByNamespace.computeIfAbsent( - symbol.getNamespace(), - ns -> new TreeMap<>() - ).put(symbol.getName(), type); + tableByNamespace + .computeIfAbsent(symbol.getNamespace(), ns -> new TreeMap<>()) + .put(symbol.getName(), type); - orderedTable.computeIfAbsent( - symbol.getNamespace(), - ns -> new LinkedHashMap<>() - ).put(symbol.getName(), type); + orderedTable + .computeIfAbsent(symbol.getNamespace(), ns -> new LinkedHashMap<>()) + .put(symbol.getName(), type); } - /** - * Remove a symbol from SymbolTable. - */ + /** Remove a symbol from SymbolTable. */ public void remove(Symbol symbol) { tableByNamespace.computeIfPresent( symbol.getNamespace(), (k, v) -> { v.remove(symbol.getName()); return v; - } - ); + }); orderedTable.computeIfPresent( symbol.getNamespace(), (k, v) -> { v.remove(symbol.getName()); return v; - } - ); + }); } /** @@ -104,42 +92,42 @@ public Map lookupByPrefix(Symbol prefix) { } /** - * Look up all top level symbols in the namespace. - * this function is mainly used by SELECT * use case to get the top level fields - * Todo. currently, the top level fields is the field which doesn't include "." in the name or - * the prefix doesn't exist in the symbol table. - * e.g. The symbol table includes person, person.name, person/2.0. - * person, is the top level field - * person.name, isn't the top level field, because the prefix (person) in symbol table - * person/2.0, is the top level field, because the prefix (person/2) isn't in symbol table + * Look up all top level symbols in the namespace. this function is mainly used by SELECT * use + * case to get the top level fields Todo. currently, the top level fields is the field which + * doesn't include "." in the name or the prefix doesn't exist in the symbol table. e.g. 
The + * symbol table includes person, person.name, person/2.0. person, is the top level field + * person.name, isn't the top level field, because the prefix (person) in symbol table person/2.0, + * is the top level field, because the prefix (person/2) isn't in symbol table * - * @param namespace a namespace - * @return all symbols in the namespace map + * @param namespace a namespace + * @return all symbols in the namespace map */ public Map lookupAllFields(Namespace namespace) { final LinkedHashMap allSymbols = orderedTable.getOrDefault(namespace, new LinkedHashMap<>()); final LinkedHashMap results = new LinkedHashMap<>(); - allSymbols.entrySet().stream().filter(entry -> { - String symbolName = entry.getKey(); - int lastDot = symbolName.lastIndexOf("."); - return -1 == lastDot || !allSymbols.containsKey(symbolName.substring(0, lastDot)); - }).forEach(entry -> results.put(entry.getKey(), entry.getValue())); + allSymbols.entrySet().stream() + .filter( + entry -> { + String symbolName = entry.getKey(); + int lastDot = symbolName.lastIndexOf("."); + return -1 == lastDot || !allSymbols.containsKey(symbolName.substring(0, lastDot)); + }) + .forEach(entry -> results.put(entry.getKey(), entry.getValue())); return results; } /** * Look up all top level symbols in the namespace. * - * @param namespace a namespace - * @return all symbols in the namespace map + * @param namespace a namespace + * @return all symbols in the namespace map */ public Map lookupAllTupleFields(Namespace namespace) { final LinkedHashMap allSymbols = orderedTable.getOrDefault(namespace, new LinkedHashMap<>()); final LinkedHashMap result = new LinkedHashMap<>(); - allSymbols.entrySet().stream() - .forEach(entry -> result.put(entry.getKey(), entry.getValue())); + allSymbols.entrySet().stream().forEach(entry -> result.put(entry.getKey(), entry.getValue())); return result; } diff --git a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java index f02bc07ccc..973b10310b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast; import org.opensearch.sql.ast.expression.AggregateFunction; @@ -62,9 +61,7 @@ import org.opensearch.sql.ast.tree.TableFunction; import org.opensearch.sql.ast.tree.Values; -/** - * AST nodes visitor Defines the traverse path. - */ +/** AST nodes visitor Defines the traverse path. */ public abstract class AbstractNodeVisitor { public T visit(Node node, C context) { @@ -73,6 +70,7 @@ public T visit(Node node, C context) { /** * Visit child node. + * * @param node {@link Node} * @param context Context * @return Return Type. diff --git a/core/src/main/java/org/opensearch/sql/ast/Node.java b/core/src/main/java/org/opensearch/sql/ast/Node.java index f3147eeb43..faaf51f221 100644 --- a/core/src/main/java/org/opensearch/sql/ast/Node.java +++ b/core/src/main/java/org/opensearch/sql/ast/Node.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast; import java.util.List; import lombok.EqualsAndHashCode; import lombok.ToString; -/** - * AST node. - */ +/** AST node. 
*/ @EqualsAndHashCode @ToString public abstract class Node { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java index e8f730d7e9..5208e39623 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/AggregateFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -17,8 +16,8 @@ import org.opensearch.sql.common.utils.StringUtils; /** - * Expression node of aggregate functions. - * Params include aggregate function name (AVG, SUM, MAX etc.) and the field to aggregate. + * Expression node of aggregate functions. Params include aggregate function name (AVG, SUM, MAX + * etc.) and the field to aggregate. */ @Getter @EqualsAndHashCode(callSuper = false) @@ -27,13 +26,16 @@ public class AggregateFunction extends UnresolvedExpression { private final String funcName; private final UnresolvedExpression field; private final List argList; + @Setter @Accessors(fluent = true) private UnresolvedExpression condition; + private Boolean distinct = false; /** * Constructor. + * * @param funcName function name. * @param field {@link UnresolvedExpression}. */ @@ -45,6 +47,7 @@ public AggregateFunction(String funcName, UnresolvedExpression field) { /** * Constructor. + * * @param funcName function name. * @param field {@link UnresolvedExpression}. * @param distinct whether distinct field is specified or not. diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java b/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java index 4183b19a3e..7b3078629b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Alias.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.AllArgsConstructor; @@ -14,10 +13,10 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Alias abstraction that associate an unnamed expression with a name and an optional alias. - * The name and alias information preserved is useful for semantic analysis and response - * formatting eventually. This can avoid restoring the info in toString() method which is - * inaccurate because original info is already lost. + * Alias abstraction that associate an unnamed expression with a name and an optional alias. The + * name and alias information preserved is useful for semantic analysis and response formatting + * eventually. This can avoid restoring the info in toString() method which is inaccurate because + * original info is already lost. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @@ -26,19 +25,13 @@ @ToString public class Alias extends UnresolvedExpression { - /** - * Original field name. - */ + /** Original field name. */ private final String name; - /** - * Expression aliased. - */ + /** Expression aliased. */ private final UnresolvedExpression delegated; - /** - * Optional field alias. - */ + /** Optional field alias. 
*/ private String alias; @Override diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java b/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java index 1f5d919817..b9b90ea24a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/AllFields.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -13,16 +12,13 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Represent the All fields which is been used in SELECT *. - */ +/** Represent the All fields which is been used in SELECT *. */ @ToString @EqualsAndHashCode(callSuper = false) public class AllFields extends UnresolvedExpression { public static final AllFields INSTANCE = new AllFields(); - private AllFields() { - } + private AllFields() {} public static AllFields of() { return INSTANCE; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/And.java b/core/src/main/java/org/opensearch/sql/ast/expression/And.java index 8d8c48f3b2..565f1eb4ab 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/And.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/And.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of logic AND. - */ +/** Expression node of logic AND. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java b/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java index f054710a32..4c2a485ea7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Argument.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Argument. - */ +/** Argument. */ @Getter @ToString @RequiredArgsConstructor diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java b/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java index 7e1fdb1516..d137a83957 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/AttributeList.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -14,15 +13,12 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node that includes a list of Expression nodes. - */ +/** Expression node that includes a list of Expression nodes. 
*/ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class AttributeList extends UnresolvedExpression { - @Getter - private List attrList; + @Getter private List attrList; @Override public List getChild() { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Between.java b/core/src/main/java/org/opensearch/sql/ast/expression/Between.java index 886c9a9282..e13c3fb187 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Between.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Between.java @@ -12,9 +12,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Unresolved expression for BETWEEN. - */ +/** Unresolved expression for BETWEEN. */ @Data @EqualsAndHashCode(callSuper = false) public class Between extends UnresolvedExpression { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Case.java b/core/src/main/java/org/opensearch/sql/ast/expression/Case.java index 81c74f3ea4..583bd0de97 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Case.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Case.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -15,29 +14,23 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node that represents CASE clause similar as Switch statement in programming language. - */ +/** AST node that represents CASE clause similar as Switch statement in programming language. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @Getter @ToString public class Case extends UnresolvedExpression { - /** - * Value to be compared by WHEN statements. Null in the case of CASE WHEN conditions. - */ + /** Value to be compared by WHEN statements. Null in the case of CASE WHEN conditions. */ private final UnresolvedExpression caseValue; /** - * Expression list that represents WHEN statements. Each is a mapping from condition - * to its result. + * Expression list that represents WHEN statements. Each is a mapping from condition to its + * result. */ private final List whenClauses; - /** - * Expression that represents ELSE statement result. - */ + /** Expression that represents ELSE statement result. */ private final UnresolvedExpression elseClause; @Override @@ -58,5 +51,4 @@ public List getChild() { public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitCase(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java b/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java index 9121dbd87c..2019346fb5 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Cast.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static org.opensearch.sql.expression.function.BuiltinFunctionName.CAST_TO_BOOLEAN; @@ -33,9 +32,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.function.FunctionName; -/** - * AST node that represents Cast clause. - */ +/** AST node that represents Cast clause. */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @Getter @@ -59,19 +56,16 @@ public class Cast extends UnresolvedExpression { .put("datetime", CAST_TO_DATETIME.getName()) .build(); - /** - * The source expression cast from. 
- */ + /** The source expression cast from. */ private final UnresolvedExpression expression; - /** - * Expression that represents name of the target type. - */ + /** Expression that represents name of the target type. */ private final UnresolvedExpression convertedType; /** * Check if the given function name is a cast function or not. - * @param name function name + * + * @param name function name * @return true if cast function, otherwise false. */ public static boolean isCastFunction(FunctionName name) { @@ -80,7 +74,8 @@ public static boolean isCastFunction(FunctionName name) { /** * Get the cast function name for a given target data type. - * @param targetType target data type + * + * @param targetType target data type * @return cast function name corresponding */ public static FunctionName getCastFunctionName(ExprType targetType) { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java b/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java index 25cf3e0f73..8ba6ba5116 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Compare.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java b/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java index 8755a15177..0b9add8f55 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/DataType.java @@ -3,17 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprCoreType; -/** - * The DataType defintion in AST. - * Question, could we use {@link ExprCoreType} directly in AST? - */ +/** The DataType defintion in AST. Question, could we use {@link ExprCoreType} directly in AST? */ @RequiredArgsConstructor public enum DataType { TYPE_ERROR(ExprCoreType.UNKNOWN), @@ -32,6 +28,5 @@ public enum DataType { TIMESTAMP(ExprCoreType.TIMESTAMP), INTERVAL(ExprCoreType.INTERVAL); - @Getter - private final ExprCoreType coreType; + @Getter private final ExprCoreType coreType; } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java b/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java index 806f897abf..344f3c5164 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/EqualTo.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,17 +13,13 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of binary operator or comparison relation EQUAL. - */ +/** Expression node of binary operator or comparison relation EQUAL. 
*/ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class EqualTo extends UnresolvedExpression { - @Getter - private UnresolvedExpression left; - @Getter - private UnresolvedExpression right; + @Getter private UnresolvedExpression left; + @Getter private UnresolvedExpression right; @Override public List getChild() { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Field.java b/core/src/main/java/org/opensearch/sql/ast/expression/Field.java index 9a8109fbe3..0a2d726ad4 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Field.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Field.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -23,16 +22,12 @@ public class Field extends UnresolvedExpression { private final List fieldArgs; - /** - * Constructor of Field. - */ + /** Constructor of Field. */ public Field(UnresolvedExpression field) { this(field, Collections.emptyList()); } - /** - * Constructor of Field. - */ + /** Constructor of Field. */ public Field(UnresolvedExpression field, List fieldArgs) { this.field = field; this.fieldArgs = fieldArgs; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Function.java b/core/src/main/java/org/opensearch/sql/ast/expression/Function.java index c712d860f4..184342c5e3 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Function.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Function.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -14,10 +13,9 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.ast.AbstractNodeVisitor; - /** - * Expression node of scalar function. - * Params include function name (@funcName) and function arguments (@funcArgs) + * Expression node of scalar function. Params include function name (@funcName) and function + * arguments (@funcArgs) */ @Getter @EqualsAndHashCode(callSuper = false) @@ -38,9 +36,8 @@ public R accept(AbstractNodeVisitor nodeVisitor, C context) { @Override public String toString() { - return String.format("%s(%s)", funcName, - funcArgs.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); + return String.format( + "%s(%s)", + funcName, funcArgs.stream().map(Object::toString).collect(Collectors.joining(", "))); } } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java index 0d4e57a78c..128d9327e8 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/HighlightFunction.java @@ -13,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of Highlight function. - */ +/** Expression node of Highlight function. 
*/ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @Getter diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/In.java b/core/src/main/java/org/opensearch/sql/ast/expression/In.java index 9ce1c124cb..38c1b91b43 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/In.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/In.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -15,10 +14,9 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node of one-to-many mapping relation IN. - * Params include the field expression and/or wildcard field expression, - * nested field expression (@field). - * And the values that the field is mapped to (@valueList). + * Expression node of one-to-many mapping relation IN. Params include the field expression and/or + * wildcard field expression, nested field expression (@field). And the values that the field is + * mapped to (@valueList). */ @Getter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java b/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java index 84b6ba02d1..c26f829f48 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Interval.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java b/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java index 2a86c89cf6..19e1b07e39 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/IntervalUnit.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -13,7 +12,7 @@ @Getter @RequiredArgsConstructor -public enum IntervalUnit { +public enum IntervalUnit { UNKNOWN, MICROSECOND, @@ -44,9 +43,7 @@ public enum IntervalUnit { INTERVAL_UNITS = builder.add(IntervalUnit.values()).build(); } - /** - * Util method to get interval unit given the unit name. - */ + /** Util method to get interval unit given the unit name. */ public static IntervalUnit of(String unit) { return INTERVAL_UNITS.stream() .filter(v -> unit.equalsIgnoreCase(v.name())) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Let.java b/core/src/main/java/org/opensearch/sql/ast/expression/Let.java index cea2a091e5..2f63a25f10 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Let.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Let.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Represent the assign operation. e.g. velocity = distance/speed. - */ +/** Represent the assign operation. e.g. velocity = distance/speed. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java b/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java index 3ff360dbf0..80c877dd97 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Literal.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -14,9 +13,8 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node of literal type - * Params include literal value (@value) and - * literal data type (@type) which can be selected from {@link DataType}. + * Expression node of literal type Params include literal value (@value) and literal data type + * (@type) which can be selected from {@link DataType}. */ @Getter @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Map.java b/core/src/main/java/org/opensearch/sql/ast/expression/Map.java index 45e98f127f..73d8d77261 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Map.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Map.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of one-to-one mapping relation. - */ +/** Expression node of one-to-one mapping relation. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java b/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java index adf2025e6c..9449eed852 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/NestedAllTupleFields.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Collections; @@ -15,14 +14,11 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Represents all tuple fields used in nested function. - */ +/** Represents all tuple fields used in nested function. */ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class NestedAllTupleFields extends UnresolvedExpression { - @Getter - private final String path; + @Getter private final String path; @Override public List getChild() { diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Not.java b/core/src/main/java/org/opensearch/sql/ast/expression/Not.java index 2926c7e5cd..423cb088ef 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Not.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Not.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of the logic NOT. - */ +/** Expression node of the logic NOT. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Or.java b/core/src/main/java/org/opensearch/sql/ast/expression/Or.java index b0dabb6e4e..cc59170d31 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Or.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Or.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of the logic OR. - */ +/** Expression node of the logic OR. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java b/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java index 83a46323e6..7a2587c5f0 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/ParseMethod.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.Getter; @@ -15,6 +14,5 @@ public enum ParseMethod { GROK("grok"), PATTERNS("patterns"); - @Getter - private final String name; + @Getter private final String name; } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java b/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java index 7f00775a84..73c6e3782a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/QualifiedName.java @@ -75,16 +75,15 @@ public Optional first() { } /** - *
    * Get rest parts of the qualified name. Assume that there must be remaining parts so caller is
-   * responsible for the check (first() or size() must be called first).
-   * For example:
-   * {@code
-   * QualifiedName name = ...
-   * Optional<String> first = name.first();
-   * if (first.isPresent()) {
-   *    name.rest() ...
-   * }
+   * responsible for the check (first() or size() must be called first).
+ * For example:
+ * {@code
+ *   QualifiedName name = ...
+ *   Optional<String> first = name.first();
+ *   if (first.isPresent()) {
+ *   name.rest() ...
+ *   }
* } * @return rest part(s) */ diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java b/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java index 3166fe45c3..a9e26a611f 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/RelevanceFieldList.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.List; @@ -13,14 +12,11 @@ import lombok.Getter; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node that includes a list of RelevanceField nodes. - */ +/** Expression node that includes a list of RelevanceField nodes. */ @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class RelevanceFieldList extends UnresolvedExpression { - @Getter - private java.util.Map fieldList; + @Getter private java.util.Map fieldList; @Override public List getChild() { @@ -34,9 +30,7 @@ public R accept(AbstractNodeVisitor nodeVisitor, C context) { @Override public String toString() { - return fieldList - .entrySet() - .stream() + return fieldList.entrySet().stream() .map(e -> String.format("\"%s\" ^ %s", e.getKey(), e.getValue())) .collect(Collectors.joining(", ")); } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java index 1b73f9bd95..26d784dd8c 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/ScoreFunction.java @@ -13,8 +13,8 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node of Score function. - * Score takes a relevance-search expression as an argument and returns it + * Expression node of Score function. Score takes a relevance-search expression as an argument and + * returns it */ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Span.java b/core/src/main/java/org/opensearch/sql/ast/expression/Span.java index e57205c19c..edd309b22d 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Span.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Span.java @@ -13,10 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Span expression node. - * Params include field expression and the span value. - */ +/** Span expression node. Params include field expression and the span value. */ @Getter @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor @@ -35,5 +32,4 @@ public List getChild() { public R accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitSpan(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java b/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java index f1f3fb19d4..5252781edb 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java @@ -42,9 +42,7 @@ public enum SpanUnit { SPAN_UNITS = builder.add(SpanUnit.values()).build(); } - /** - * Util method to get span unit given the unit name. - */ + /** Util method to get span unit given the unit name. 
*/ public static SpanUnit of(String unit) { switch (unit) { case "": @@ -64,5 +62,4 @@ public static SpanUnit of(String unit) { public static String getName(SpanUnit unit) { return unit.name; } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java index e4def038ed..2c6eee46e9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedArgument.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -13,9 +12,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Argument. - */ +/** Argument. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java index f4bc88853f..e1754cbf76 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedAttribute.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -15,9 +14,8 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; /** - * Expression node, representing the syntax that is not resolved to - * any other expression nodes yet but non-negligible - * This expression is often created as the index name, field name etc. + * Expression node, representing the syntax that is not resolved to any other expression nodes yet + * but non-negligible This expression is often created as the index name, field name etc. */ @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java index ee3922f797..9b2d530b7f 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/UnresolvedExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import lombok.EqualsAndHashCode; diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/When.java b/core/src/main/java/org/opensearch/sql/ast/expression/When.java index a52870b408..db747592e5 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/When.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/When.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -15,23 +14,17 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node that represents WHEN clause. - */ +/** AST node that represents WHEN clause. */ @EqualsAndHashCode(callSuper = false) @Getter @RequiredArgsConstructor @ToString public class When extends UnresolvedExpression { - /** - * WHEN condition, either a search condition or compare value if case value present. - */ + /** WHEN condition, either a search condition or compare value if case value present. */ private final UnresolvedExpression condition; - /** - * Result to return if condition matched. 
- */ + /** Result to return if condition matched. */ private final UnresolvedExpression result; @Override @@ -43,5 +36,4 @@ public List getChild() { public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitWhen(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java b/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java index 9a7535e1fe..47f5265765 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/WindowFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import com.google.common.collect.ImmutableList; @@ -42,5 +41,4 @@ public List getChild() { public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitWindowFunction(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java b/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java index 731feccd33..40db015fd7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Xor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import java.util.Arrays; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Expression node of the logic XOR. - */ +/** Expression node of the logic XOR. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java b/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java index 1e364d69fd..5ee260c130 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Explain.java @@ -12,9 +12,7 @@ import lombok.EqualsAndHashCode; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Explain Statement. - */ +/** Explain Statement. */ @Data @EqualsAndHashCode(callSuper = false) public class Explain extends Statement { diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Query.java b/core/src/main/java/org/opensearch/sql/ast/statement/Query.java index 82efdde4dd..6366451b72 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Query.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Query.java @@ -16,9 +16,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.tree.UnresolvedPlan; -/** - * Query Statement. - */ +/** Query Statement. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java b/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java index e32a8dbfd8..d90071a0ca 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Statement.java @@ -11,9 +11,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Statement is the high interface of core engine. - */ +/** Statement is the high interface of core engine. 
*/ public abstract class Statement extends Node { @Override public R accept(AbstractNodeVisitor visitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/AD.java b/core/src/main/java/org/opensearch/sql/ast/tree/AD.java index e9aee25c23..0e1cc33db9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/AD.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/AD.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java b/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java index e9fa26e981..f098d0ec53 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Aggregation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -17,9 +16,7 @@ import org.opensearch.sql.ast.expression.Argument; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Aggregation, the interface for building aggregation actions in queries. - */ +/** Logical plan node of Aggregation, the interface for building aggregation actions in queries. */ @Getter @Setter @ToString @@ -32,23 +29,21 @@ public class Aggregation extends UnresolvedPlan { private List argExprList; private UnresolvedPlan child; - /** - * Aggregation Constructor without span and argument. - */ - public Aggregation(List aggExprList, - List sortExprList, - List groupExprList) { + /** Aggregation Constructor without span and argument. */ + public Aggregation( + List aggExprList, + List sortExprList, + List groupExprList) { this(aggExprList, sortExprList, groupExprList, null, Collections.emptyList()); } - /** - * Aggregation Constructor. - */ - public Aggregation(List aggExprList, - List sortExprList, - List groupExprList, - UnresolvedExpression span, - List argExprList) { + /** Aggregation Constructor. */ + public Aggregation( + List aggExprList, + List sortExprList, + List groupExprList, + UnresolvedExpression span, + List argExprList) { this.aggExprList = aggExprList; this.sortExprList = sortExprList; this.groupExprList = groupExprList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java b/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java index cf82c2b070..832b79d34a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/CloseCursor.java @@ -9,15 +9,10 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node to represent close cursor operation. - * Actually a wrapper to the AST. - */ +/** AST node to represent close cursor operation. Actually a wrapper to the AST. */ public class CloseCursor extends UnresolvedPlan { - /** - * An instance of {@link FetchCursor}. - */ + /** An instance of {@link FetchCursor}. 
*/ private UnresolvedPlan cursor; @Override diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java b/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java index 6514d65a04..3173f42f9c 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Dedupe.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -18,9 +17,7 @@ import org.opensearch.sql.ast.expression.Argument; import org.opensearch.sql.ast.expression.Field; -/** - * AST node represent Dedupe operation. - */ +/** AST node represent Dedupe operation. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java b/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java index 184f2ac497..ecceabd757 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Eval.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Let; -/** - * AST node represent Eval operation. - */ +/** AST node represent Eval operation. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java b/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java index aa327c295b..d6a260ad0b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/FetchCursor.java @@ -10,15 +10,11 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * An unresolved plan that represents fetching the next - * batch in paginationed plan. - */ +/** An unresolved plan that represents fetching the next batch in paginated plan. */ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class FetchCursor extends UnresolvedPlan { - @Getter - final String cursor; + @Getter final String cursor; @Override public T accept(AbstractNodeVisitor nodeVisitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java b/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java index 8128078930..6c57275db9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Filter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -14,9 +13,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Filter, the interface for building filters in queries. - */ +/** Logical plan node of Filter, the interface for building filters in queries. 
*/ @ToString @EqualsAndHashCode(callSuper = false) @Getter diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Head.java b/core/src/main/java/org/opensearch/sql/ast/tree/Head.java index 8590c212ad..bf6b2caed1 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Head.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Head.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * AST node represent Head operation. - */ +/** AST node represent Head operation. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java b/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java index 5d2e32c28b..ed03efed8a 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Kmeans.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java b/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java index f9b8a8feab..9290776172 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Limit.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -38,5 +37,4 @@ public List getChild() { public T accept(AbstractNodeVisitor visitor, C context) { return visitor.visitLimit(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/ML.java b/core/src/main/java/org/opensearch/sql/ast/tree/ML.java index 2f83a993b7..320fb2bcf7 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/ML.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/ML.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import static org.opensearch.sql.utils.MLCommonsConstants.ACTION; @@ -66,7 +65,7 @@ public List getChild() { } private String getAction() { - return (String) arguments.get(ACTION).getValue(); + return (String) arguments.get(ACTION).getValue(); } /** @@ -85,7 +84,7 @@ public Map getOutputSchema(TypeEnvironment env) { return getPredictOutputSchema(); default: throw new IllegalArgumentException( - "Action error. Please indicate train, predict or trainandpredict."); + "Action error. Please indicate train, predict or trainandpredict."); } } @@ -122,8 +121,8 @@ public Map getPredictOutputSchema() { * @return the schema */ public Map getTrainOutputSchema() { - boolean isAsync = arguments.containsKey(ASYNC) - ? (boolean) arguments.get(ASYNC).getValue() : false; + boolean isAsync = + arguments.containsKey(ASYNC) ? 
(boolean) arguments.get(ASYNC).getValue() : false; Map res = new HashMap<>(Map.of(STATUS, ExprCoreType.STRING)); if (isAsync) { res.put(TASKID, ExprCoreType.STRING); diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java b/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java index 55e0e8c7a6..69b11600c9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java @@ -13,16 +13,12 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * AST node to represent pagination operation. - * Actually a wrapper to the AST. - */ +/** AST node to represent pagination operation. Actually a wrapper to the AST. */ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) @ToString public class Paginate extends UnresolvedPlan { - @Getter - private final int pageSize; + @Getter private final int pageSize; private UnresolvedPlan child; public Paginate(int pageSize, UnresolvedPlan child) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java b/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java index 02a69c93af..e91c4a68ff 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Parse.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -20,9 +19,7 @@ import org.opensearch.sql.ast.expression.ParseMethod; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * AST node represent Parse with regex operation. - */ +/** AST node represent Parse with regex operation. */ @Getter @Setter @ToString @@ -30,29 +27,19 @@ @RequiredArgsConstructor @AllArgsConstructor public class Parse extends UnresolvedPlan { - /** - * Method used to parse a field. - */ + /** Method used to parse a field. */ private final ParseMethod parseMethod; - /** - * Field. - */ + /** Field. */ private final UnresolvedExpression sourceField; - /** - * Pattern. - */ + /** Pattern. */ private final Literal pattern; - /** - * Optional arguments. - */ + /** Optional arguments. */ private final Map arguments; - /** - * Child Plan. - */ + /** Child Plan. */ private UnresolvedPlan child; @Override diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Project.java b/core/src/main/java/org/opensearch/sql/ast/tree/Project.java index 33c7128855..cffb4dfdce 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Project.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Project.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -17,15 +16,12 @@ import org.opensearch.sql.ast.expression.Argument; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Project, the interface for building the list of searching fields. - */ +/** Logical plan node of Project, the interface for building the list of searching fields. */ @ToString @Getter @EqualsAndHashCode(callSuper = false) public class Project extends UnresolvedPlan { - @Setter - private List projectList; + @Setter private List projectList; private List argExprList; private UnresolvedPlan child; @@ -43,9 +39,7 @@ public boolean hasArgument() { return !argExprList.isEmpty(); } - /** - * The Project could been used to exclude fields from the source. 
- */ + /** The Project could be used to exclude fields from the source. */ public boolean isExcluded() { if (hasArgument()) { Argument argument = argExprList.get(0); diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java b/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java index 407d37e8e4..2cbe170541 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/RareTopN.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import java.util.Collections; @@ -19,9 +18,7 @@ import org.opensearch.sql.ast.expression.Field; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * AST node represent RareTopN operation. - */ +/** AST node represent RareTopN operation. */ @Getter @Setter @ToString diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java b/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java index 8c3868329f..ec5264a86b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Relation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -18,9 +17,7 @@ import org.opensearch.sql.ast.expression.QualifiedName; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * Logical plan node of Relation, the interface for building the searching sources. - */ +/** Logical plan node of Relation, the interface for building the searching sources. */ @AllArgsConstructor @ToString @EqualsAndHashCode(callSuper = false) @@ -39,9 +36,7 @@ public Relation(UnresolvedExpression tableName, String alias) { this.alias = alias; } - /** - * Optional alias name for the relation. - */ + /** Optional alias name for the relation. */ private String alias; /** @@ -72,9 +67,9 @@ public String getAlias() { } /** - * Get Qualified name preservs parts of the user given identifiers. - * This can later be utilized to determine DataSource,Schema and Table Name during - * Analyzer stage. So Passing QualifiedName directly to Analyzer Stage. + * Getting QualifiedName preserves parts of the user-given identifiers. This can later be utilized to + * determine DataSource, Schema and Table Name during the Analyzer stage, so QualifiedName is passed + * directly to the Analyzer stage. * * @return TableQualifiedName. */ @@ -82,9 +77,10 @@ public QualifiedName getTableQualifiedName() { if (tableName.size() == 1) { return (QualifiedName) tableName.get(0); } else { - return new QualifiedName(tableName.stream() - .map(UnresolvedExpression::toString) - .collect(Collectors.joining(COMMA))); + return new QualifiedName( + tableName.stream() + .map(UnresolvedExpression::toString) + .collect(Collectors.joining(COMMA))); } } diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java b/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java index 89122bea7f..5ab4d71dd9 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/RelationSubquery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -14,9 +13,7 @@ import lombok.ToString; import org.opensearch.sql.ast.AbstractNodeVisitor; -/** - * Logical plan node of RelationSubquery. - */ +/** Logical plan node of RelationSubquery. 
*/ @AllArgsConstructor @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor @@ -25,9 +22,7 @@ public class RelationSubquery extends UnresolvedPlan { private UnresolvedPlan query; private String alias; - /** - * Take subquery alias as table name. - */ + /** Take subquery alias as table name. */ public String getAliasAsTableName() { return alias; } diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java b/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java index 69700c871c..e6f760aca0 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Rename.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java b/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java index 5fb4139bea..073cb7aa1b 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Sort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import static org.opensearch.sql.ast.tree.Sort.NullOrder.NULL_FIRST; @@ -22,9 +21,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Field; -/** - * AST node for Sort {@link Sort#sortList} represent a list of sort expression and sort options. - */ +/** AST node for Sort {@link Sort#sortList} represent a list of sort expression and sort options. */ @ToString @EqualsAndHashCode(callSuper = false) @Getter @@ -50,19 +47,14 @@ public T accept(AbstractNodeVisitor nodeVisitor, C context) { return nodeVisitor.visitSort(this, context); } - /** - * Sort Options. - */ + /** Sort Options. */ @Data public static class SortOption { - /** - * Default ascending sort option, null first. - */ + /** Default ascending sort option, null first. */ public static SortOption DEFAULT_ASC = new SortOption(ASC, NULL_FIRST); - /** - * Default descending sort option, null last. - */ + + /** Default descending sort option, null last. */ public static SortOption DEFAULT_DESC = new SortOption(DESC, NULL_LAST); private final SortOrder sortOrder; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java b/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java index 064cbf24fe..a193964a18 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/TableFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -18,9 +17,7 @@ import org.opensearch.sql.ast.expression.QualifiedName; import org.opensearch.sql.ast.expression.UnresolvedExpression; -/** - * ASTNode for Table Function. - */ +/** AST Node for Table Function. 
*/ @ToString @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor @@ -28,8 +25,7 @@ public class TableFunction extends UnresolvedPlan { private final UnresolvedExpression functionName; - @Getter - private final List arguments; + @Getter private final List arguments; public QualifiedName getFunctionName() { return (QualifiedName) functionName; diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java b/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java index 672a4602ed..3074303b0d 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/UnresolvedPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import lombok.EqualsAndHashCode; @@ -11,9 +10,7 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.Node; -/** - * Abstract unresolved plan. - */ +/** Abstract unresolved plan. */ @EqualsAndHashCode(callSuper = false) @ToString public abstract class UnresolvedPlan extends Node { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Values.java b/core/src/main/java/org/opensearch/sql/ast/tree/Values.java index 5a662912f9..65d7e8d7cb 100644 --- a/core/src/main/java/org/opensearch/sql/ast/tree/Values.java +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Values.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.tree; import com.google.common.collect.ImmutableList; @@ -16,9 +15,7 @@ import org.opensearch.sql.ast.Node; import org.opensearch.sql.ast.expression.Literal; -/** - * AST node class for a sequence of literal values. - */ +/** AST node class for a sequence of literal values. */ @ToString @Getter @EqualsAndHashCode(callSuper = false) @@ -41,5 +38,4 @@ public T accept(AbstractNodeVisitor nodeVisitor, C context) { public List getChild() { return ImmutableList.of(); } - } diff --git a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java index 1f6363c068..48781df847 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import com.google.common.base.Objects; import lombok.RequiredArgsConstructor; -/** - * Expression Number Value. - */ +/** Expression Number Value. */ @RequiredArgsConstructor public abstract class AbstractExprNumberValue extends AbstractExprValue { private final Number value; diff --git a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java index f332867645..d3e15683e7 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprValue.java @@ -31,7 +31,6 @@ public int compareTo(ExprValue other) { /** * The customize equals logic.
* The table below list the NULL and MISSING handling logic. - * * * * diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java index d655c0dabb..ee97cf2228 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprBooleanValue.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import com.google.common.base.Objects; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Boolean Value. - */ +/** Expression Boolean Value. */ public class ExprBooleanValue extends AbstractExprValue { private static final ExprBooleanValue TRUE = new ExprBooleanValue(true); private static final ExprBooleanValue FALSE = new ExprBooleanValue(false); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java index b39e6e9d7f..8e74a83cf3 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprByteValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Byte Value. - */ +/** Expression Byte Value. */ public class ExprByteValue extends AbstractExprNumberValue { public ExprByteValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java index 1326733263..d1f56c3d76 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprCollectionValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import com.google.common.base.Objects; @@ -15,9 +14,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Collection Value. - */ +/** Expression Collection Value. */ @RequiredArgsConstructor public class ExprCollectionValue extends AbstractExprValue { private final List valueList; @@ -43,9 +40,7 @@ public List collectionValue() { @Override public String toString() { - return valueList.stream() - .map(Object::toString) - .collect(Collectors.joining(", ", "[", "]")); + return valueList.stream().map(Object::toString).collect(Collectors.joining(", ", "[", "]")); } @Override @@ -68,9 +63,7 @@ public boolean equal(ExprValue o) { } } - /** - * Only compare the size of the list. - */ + /** Only compare the size of the list. 
*/ @Override public int compare(ExprValue other) { return Integer.compare(valueList.size(), other.collectionValue().size()); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java index 57ce87df47..3f3f67a4fa 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDateValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; @@ -22,23 +21,19 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Expression Date Value. - */ +/** Expression Date Value. */ @RequiredArgsConstructor public class ExprDateValue extends AbstractExprValue { private final LocalDate date; - /** - * Constructor of ExprDateValue. - */ + /** Constructor of ExprDateValue. */ public ExprDateValue(String date) { try { this.date = LocalDate.parse(date, DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("date:%s in unsupported format, please use " - + "yyyy-MM-dd", date)); + throw new SemanticCheckException( + String.format("date:%s in unsupported format, please use 'yyyy-MM-dd'", date)); } } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java index 8d40aaf82c..305958043f 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDatetimeValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_WITH_TZ; @@ -23,20 +22,19 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; - @RequiredArgsConstructor public class ExprDatetimeValue extends AbstractExprValue { private final LocalDateTime datetime; - /** - * Constructor with datetime string as input. - */ + /** Constructor with datetime string as input. */ public ExprDatetimeValue(String datetime) { try { this.datetime = LocalDateTime.parse(datetime, DATE_TIME_FORMATTER_WITH_TZ); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("datetime:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", datetime)); + throw new SemanticCheckException( + String.format( + "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + datetime)); } } @@ -77,9 +75,11 @@ public boolean equal(ExprValue other) { @Override public String value() { - return String.format("%s %s", DateTimeFormatter.ISO_DATE.format(datetime), - DateTimeFormatter.ISO_TIME.format((datetime.getNano() == 0) - ? datetime.truncatedTo(ChronoUnit.SECONDS) : datetime)); + return String.format( + "%s %s", + DateTimeFormatter.ISO_DATE.format(datetime), + DateTimeFormatter.ISO_TIME.format( + (datetime.getNano() == 0) ? 
datetime.truncatedTo(ChronoUnit.SECONDS) : datetime)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java index 171b064e68..c192fe901c 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprDoubleValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Double Value. - */ +/** Expression Double Value. */ public class ExprDoubleValue extends AbstractExprNumberValue { public ExprDoubleValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java index dc454b4b50..6d321687fb 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprFloatValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Float Value. - */ +/** Expression Float Value. */ public class ExprFloatValue extends AbstractExprNumberValue { public ExprFloatValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java index 06947766fc..81321c1f12 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprIntegerValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Integer Value. - */ +/** Expression Integer Value. */ public class ExprIntegerValue extends AbstractExprNumberValue { public ExprIntegerValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java index 25a3115e8c..754520d7c8 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprIntervalValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.time.temporal.TemporalAmount; @@ -27,7 +26,8 @@ public int compare(ExprValue other) { TemporalAmount otherInterval = other.intervalValue(); if (!interval.getClass().equals(other.intervalValue().getClass())) { throw new ExpressionEvaluationException( - String.format("invalid to compare intervals with units %s and %s", + String.format( + "invalid to compare intervals with units %s and %s", unit(), ((ExprIntervalValue) other).unit())); } return Long.compare( @@ -49,12 +49,9 @@ public ExprType type() { return ExprCoreType.INTERVAL; } - /** - * Util method to get temporal unit stored locally. - */ + /** Util method to get temporal unit stored locally. 
*/ public TemporalUnit unit() { - return interval.getUnits() - .stream() + return interval.getUnits().stream() .filter(v -> interval.get(v) != 0) .findAny() .orElse(interval.getUnits().get(0)); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java index 1df590246c..537a164490 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprLongValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Long Value. - */ +/** Expression Long Value. */ public class ExprLongValue extends AbstractExprNumberValue { public ExprLongValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java index 9908074773..80add24042 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprMissingValue.java @@ -3,21 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.util.Objects; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Missing Value. - */ +/** Expression Missing Value. */ public class ExprMissingValue extends AbstractExprValue { private static final ExprMissingValue instance = new ExprMissingValue(); - private ExprMissingValue() { - } + private ExprMissingValue() {} public static ExprMissingValue of() { return instance; @@ -40,13 +36,13 @@ public boolean isMissing() { @Override public int compare(ExprValue other) { - throw new IllegalStateException(String.format("[BUG] Unreachable, Comparing with MISSING is " - + "undefined")); + throw new IllegalStateException( + String.format("[BUG] Unreachable, Comparing with MISSING is " + "undefined")); } /** - * Missing value is equal to Missing value. - * Notes, this function should only used for Java Object Compare. + * Missing value is equal to Missing value. Note: this function should only be used for Java Object + * comparison. */ @Override public boolean equal(ExprValue other) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java index 54d4811d33..ac2e185e65 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprNullValue.java @@ -3,21 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.util.Objects; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Null Value. - */ +/** Expression Null Value. */ public class ExprNullValue extends AbstractExprValue { private static final ExprNullValue instance = new ExprNullValue(); - private ExprNullValue() { - } + private ExprNullValue() {} @Override public int hashCode() { @@ -55,8 +51,8 @@ public int compare(ExprValue other) { } /** - * NULL value is equal to NULL value. - * Notes, this function should only used for Java Object Compare. + * NULL value is equal to NULL value. Note: this function should only be used for Java Object + * comparison. 
*/ @Override public boolean equal(ExprValue other) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java index 3e5f6858bc..418cefa166 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprShortValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Expression Short Value. - */ +/** Expression Short Value. */ public class ExprShortValue extends AbstractExprNumberValue { public ExprShortValue(Number value) { diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java index c41c23d6ac..7745af62b6 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprStringValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.time.LocalDate; @@ -15,9 +14,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Expression String Value. - */ +/** Expression String Value. */ @RequiredArgsConstructor public class ExprStringValue extends AbstractExprValue { private final String value; @@ -44,11 +41,13 @@ public LocalDateTime datetimeValue() { } catch (SemanticCheckException e) { try { return new ExprDatetimeValue( - LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) + LocalDateTime.of(new ExprDateValue(value).dateValue(), LocalTime.of(0, 0, 0))) .datetimeValue(); } catch (SemanticCheckException exception) { - throw new SemanticCheckException(String.format("datetime:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", value)); + throw new SemanticCheckException( + String.format( + "datetime:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + value)); } } } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java index d15cee5e71..455a379b03 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTimestampValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS; @@ -22,33 +21,33 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.SemanticCheckException; -/** - * Expression Timestamp Value. - */ +/** Expression Timestamp Value. */ @RequiredArgsConstructor public class ExprTimestampValue extends AbstractExprValue { private final Instant timestamp; - /** - * Constructor. - */ + /** Constructor. 
*/ public ExprTimestampValue(String timestamp) { try { - this.timestamp = LocalDateTime.parse(timestamp, DATE_TIME_FORMATTER_VARIABLE_NANOS) - .atZone(UTC_ZONE_ID) - .toInstant(); + this.timestamp = + LocalDateTime.parse(timestamp, DATE_TIME_FORMATTER_VARIABLE_NANOS) + .atZone(UTC_ZONE_ID) + .toInstant(); } catch (DateTimeParseException e) { - throw new SemanticCheckException(String.format("timestamp:%s in unsupported format, please " - + "use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", timestamp)); + throw new SemanticCheckException( + String.format( + "timestamp:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + timestamp)); } - } @Override public String value() { - return timestamp.getNano() == 0 ? DATE_TIME_FORMATTER_WITHOUT_NANO.withZone(UTC_ZONE_ID) - .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) + return timestamp.getNano() == 0 + ? DATE_TIME_FORMATTER_WITHOUT_NANO + .withZone(UTC_ZONE_ID) + .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) : DATE_TIME_FORMATTER_VARIABLE_NANOS.withZone(UTC_ZONE_ID).format(timestamp); } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java index 749de931ee..856075bed8 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprTupleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.util.Iterator; @@ -18,9 +17,7 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; import org.opensearch.sql.storage.bindingtuple.LazyBindingTuple; -/** - * Expression Tuple Value. - */ +/** Expression Tuple Value. */ @RequiredArgsConstructor public class ExprTupleValue extends AbstractExprValue { @@ -47,8 +44,7 @@ public ExprType type() { @Override public String toString() { - return valueMap.entrySet() - .stream() + return valueMap.entrySet().stream() .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) .collect(Collectors.joining(",", "{", "}")); } @@ -70,6 +66,7 @@ public ExprValue keyValue(String key) { /** * Override the equals method. + * * @return true for equal, otherwise false. */ public boolean equal(ExprValue o) { @@ -91,9 +88,7 @@ public boolean equal(ExprValue o) { } } - /** - * Only compare the size of the map. - */ + /** Only compare the size of the map. */ @Override public int compare(ExprValue other) { return Integer.compare(valueMap.size(), other.tupleValue().size()); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java index 1ae03de37b..86bead77b7 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import java.io.Serializable; @@ -19,18 +18,12 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** - * The definition of the Expression Value. - */ +/** The definition of the Expression Value. */ public interface ExprValue extends Serializable, Comparable { - /** - * Get the Object value of the Expression Value. - */ + /** Get the Object value of the Expression Value. */ Object value(); - /** - * Get the {@link ExprCoreType} of the Expression Value. 
- */ + /** Get the {@link ExprCoreType} of the Expression Value. */ ExprType type(); /** @@ -69,136 +62,104 @@ default boolean isDateTime() { return false; } - /** - * Get the {@link BindingTuple}. - */ + /** Get the {@link BindingTuple}. */ default BindingTuple bindingTuples() { return BindingTuple.EMPTY; } - /** - * Get byte value. - */ + /** Get byte value. */ default Byte byteValue() { throw new ExpressionEvaluationException( "invalid to get byteValue from value of type " + type()); } - /** - * Get short value. - */ + /** Get short value. */ default Short shortValue() { throw new ExpressionEvaluationException( "invalid to get shortValue from value of type " + type()); } - /** - * Get integer value. - */ + /** Get integer value. */ default Integer integerValue() { throw new ExpressionEvaluationException( "invalid to get integerValue from value of type " + type()); } - /** - * Get long value. - */ + /** Get long value. */ default Long longValue() { throw new ExpressionEvaluationException( "invalid to get longValue from value of type " + type()); } - /** - * Get float value. - */ + /** Get float value. */ default Float floatValue() { throw new ExpressionEvaluationException( "invalid to get floatValue from value of type " + type()); } - /** - * Get float value. - */ + /** Get float value. */ default Double doubleValue() { throw new ExpressionEvaluationException( "invalid to get doubleValue from value of type " + type()); } - /** - * Get string value. - */ + /** Get string value. */ default String stringValue() { throw new ExpressionEvaluationException( "invalid to get stringValue from value of type " + type()); } - /** - * Get boolean value. - */ + /** Get boolean value. */ default Boolean booleanValue() { throw new ExpressionEvaluationException( "invalid to get booleanValue from value of type " + type()); } - /** - * Get timestamp value. - */ + /** Get timestamp value. */ default Instant timestampValue() { throw new ExpressionEvaluationException( "invalid to get timestampValue from value of type " + type()); } - /** - * Get time value. - */ + /** Get time value. */ default LocalTime timeValue() { throw new ExpressionEvaluationException( "invalid to get timeValue from value of type " + type()); } - /** - * Get date value. - */ + /** Get date value. */ default LocalDate dateValue() { throw new ExpressionEvaluationException( "invalid to get dateValue from value of type " + type()); } - /** - * Get datetime value. - */ + /** Get datetime value. */ default LocalDateTime datetimeValue() { throw new ExpressionEvaluationException( "invalid to get datetimeValue from value of type " + type()); } - /** - * Get interval value. - */ + /** Get interval value. */ default TemporalAmount intervalValue() { throw new ExpressionEvaluationException( "invalid to get intervalValue from value of type " + type()); } - /** - * Get map value. - */ + /** Get map value. */ default Map tupleValue() { throw new ExpressionEvaluationException( "invalid to get tupleValue from value of type " + type()); } - /** - * Get collection value. - */ + /** Get collection value. */ default List collectionValue() { throw new ExpressionEvaluationException( "invalid to get collectionValue from value of type " + type()); } /** - * Get the value specified by key from {@link ExprTupleValue}. - * This method only be implemented in {@link ExprTupleValue}. + * Get the value specified by key from {@link ExprTupleValue}. This method only be implemented in + * {@link ExprTupleValue}. 
*/ default ExprValue keyValue(String key) { return ExprMissingValue.of(); diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java index 43a3140ef3..a259eb9fba 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java @@ -18,9 +18,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.exception.ExpressionEvaluationException; -/** - * The definition of {@link ExprValue} factory. - */ +/** The definition of {@link ExprValue} factory. */ @UtilityClass public class ExprValueUtils { public static final ExprValue LITERAL_TRUE = ExprBooleanValue.of(true); @@ -80,19 +78,15 @@ public static ExprValue timestampValue(Instant value) { return new ExprTimestampValue(value); } - /** - * {@link ExprTupleValue} constructor. - */ + /** {@link ExprTupleValue} constructor. */ public static ExprValue tupleValue(Map map) { LinkedHashMap valueMap = new LinkedHashMap<>(); - map.forEach((k, v) -> valueMap - .put(k, v instanceof ExprValue ? (ExprValue) v : fromObjectValue(v))); + map.forEach( + (k, v) -> valueMap.put(k, v instanceof ExprValue ? (ExprValue) v : fromObjectValue(v))); return new ExprTupleValue(valueMap); } - /** - * {@link ExprCollectionValue} constructor. - */ + /** {@link ExprCollectionValue} constructor. */ public static ExprValue collectionValue(List list) { List valueList = new ArrayList<>(); list.forEach(o -> valueList.add(fromObjectValue(o))); @@ -107,9 +101,7 @@ public static ExprValue nullValue() { return ExprNullValue.of(); } - /** - * Construct ExprValue from Object. - */ + /** Construct ExprValue from Object. */ public static ExprValue fromObjectValue(Object o) { if (null == o) { return LITERAL_NULL; @@ -149,19 +141,17 @@ public static ExprValue fromObjectValue(Object o) { } } - /** - * Construct ExprValue from Object with ExprCoreType. - */ + /** Construct ExprValue from Object with ExprCoreType. */ public static ExprValue fromObjectValue(Object o, ExprCoreType type) { switch (type) { case TIMESTAMP: - return new ExprTimestampValue((String)o); + return new ExprTimestampValue((String) o); case DATE: - return new ExprDateValue((String)o); + return new ExprDateValue((String) o); case TIME: - return new ExprTimeValue((String)o); + return new ExprTimeValue((String) o); case DATETIME: - return new ExprDatetimeValue((String)o); + return new ExprDatetimeValue((String) o); default: return fromObjectValue(o); } diff --git a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java index 815f94a9df..f1979d8666 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java +++ b/core/src/main/java/org/opensearch/sql/data/type/ExprCoreType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import com.google.common.collect.ImmutableMap; @@ -15,25 +14,18 @@ import java.util.Set; import java.util.stream.Collectors; -/** - * Expression Type. - */ +/** Expression Type. */ public enum ExprCoreType implements ExprType { - /** - * Unknown due to unsupported data type. - */ + /** Unknown due to unsupported data type. */ UNKNOWN, /** - * Undefined type for special literal such as NULL. - * As the root of data type tree, it is compatible with any other type. - * In other word, undefined type is the "narrowest" type. 
+ * Undefined type for special literal such as NULL. As the root of data type tree, it is + * compatible with any other type. In other word, undefined type is the "narrowest" type. */ UNDEFINED, - /** - * Numbers. - */ + /** Numbers. */ BYTE(UNDEFINED), SHORT(BYTE), INTEGER(SHORT), @@ -41,43 +33,29 @@ public enum ExprCoreType implements ExprType { FLOAT(LONG), DOUBLE(FLOAT), - /** - * String. - */ + /** String. */ STRING(UNDEFINED), - /** - * Boolean. - */ + /** Boolean. */ BOOLEAN(STRING), - /** - * Date. - */ + /** Date. */ DATE(STRING), TIME(STRING), DATETIME(STRING, DATE, TIME), TIMESTAMP(STRING, DATETIME), INTERVAL(UNDEFINED), - /** - * Struct. - */ + /** Struct. */ STRUCT(UNDEFINED), - /** - * Array. - */ + /** Array. */ ARRAY(UNDEFINED); - /** - * Parents (wider/compatible types) of current base type. - */ + /** Parents (wider/compatible types) of current base type. */ private final List parents = new ArrayList<>(); - /** - * The mapping between Type and legacy JDBC type name. - */ + /** The mapping between Type and legacy JDBC type name. */ private static final Map LEGACY_TYPE_NAME_MAPPING = new ImmutableMap.Builder() .put(STRUCT, "OBJECT") @@ -116,14 +94,12 @@ public String legacyTypeName() { return LEGACY_TYPE_NAME_MAPPING.getOrDefault(this, this.name()); } - /** - * Return all the valid ExprCoreType. - */ + /** Return all the valid ExprCoreType. */ public static List coreTypes() { return Arrays.stream(ExprCoreType.values()) - .filter(type -> type != UNKNOWN) - .filter(type -> type != UNDEFINED) - .collect(Collectors.toList()); + .filter(type -> type != UNKNOWN) + .filter(type -> type != UNDEFINED) + .collect(Collectors.toList()); } public static Set numberTypes() { diff --git a/core/src/main/java/org/opensearch/sql/data/type/ExprType.java b/core/src/main/java/org/opensearch/sql/data/type/ExprType.java index 782714ba70..58d6ee346b 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/ExprType.java +++ b/core/src/main/java/org/opensearch/sql/data/type/ExprType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -13,13 +12,9 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.Expression; -/** - * The Type of {@link Expression} and {@link ExprValue}. - */ +/** The Type of {@link Expression} and {@link ExprValue}. */ public interface ExprType { - /** - * Is compatible with other types. - */ + /** Is compatible with other types. */ default boolean isCompatible(ExprType other) { if (this.equals(other)) { return true; @@ -37,30 +32,25 @@ default boolean isCompatible(ExprType other) { } /** - * Should cast this type to other type or not. By default, cast is always required - * if the given type is different from this type. + * Should cast this type to other type or not. By default, cast is always required if the given + * type is different from this type. + * * @param other other data type - * @return true if cast is required, otherwise false + * @return true if cast is required, otherwise false */ default boolean shouldCast(ExprType other) { return !this.equals(other); } - /** - * Get the parent type. - */ + /** Get the parent type. */ default List getParent() { return Arrays.asList(UNKNOWN); } - /** - * Get the type name. - */ + /** Get the type name. */ String typeName(); - /** - * Get the legacy type name for old engine. - */ + /** Get the legacy type name for old engine. 
*/ default String legacyTypeName() { return typeName(); } diff --git a/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java b/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java index c781e5e775..5f27b1fb3b 100644 --- a/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java +++ b/core/src/main/java/org/opensearch/sql/data/type/WideningTypeRule.java @@ -13,7 +13,6 @@ /** * The definition of widening type rule for expression value. - * *
* * diff --git a/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java index ef390dc53b..812c8dba3d 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/ExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java index 13c3606f72..f3285fd6f9 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/NaturalExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java index 03890bba61..82fac55e0c 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java index 589d4b3043..0221e50887 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/NullsLastExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java b/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java index 65fceacf99..ca3cb251f4 100644 --- a/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java +++ b/core/src/main/java/org/opensearch/sql/data/utils/ReverseExprValueOrdering.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import com.google.common.collect.Ordering; diff --git a/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java b/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java index 9167737a70..3d6ddc864e 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java +++ b/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java @@ -9,9 +9,7 @@ import org.opensearch.sql.datasource.model.DataSource; import org.opensearch.sql.datasource.model.DataSourceMetadata; -/** - * DataSource Service manage {@link DataSource}. - */ +/** DataSource Service manage {@link DataSource}. */ public interface DataSourceService { /** @@ -22,21 +20,19 @@ public interface DataSourceService { */ DataSource getDataSource(String dataSourceName); - /** - * Returns all dataSource Metadata objects. The returned objects won't contain - * any of the credential info. + * Returns all dataSource Metadata objects. 
The returned objects won't contain any of the + * credential info. * - * @param isDefaultDataSourceRequired is used to specify - * if default opensearch connector is required in the output list. + * @param isDefaultDataSourceRequired is used to specify if default opensearch connector is + * required in the output list. * @return set of {@link DataSourceMetadata}. */ Set getDataSourceMetadata(boolean isDefaultDataSourceRequired); - /** - * Returns dataSourceMetadata object with specific name. - * The returned objects won't contain any crendetial info. + * Returns dataSourceMetadata object with specific name. The returned objects won't contain any + * crendetial info. * * @param name name of the {@link DataSource}. * @return set of {@link DataSourceMetadata}. @@ -57,7 +53,6 @@ public interface DataSourceService { */ void updateDataSource(DataSourceMetadata dataSourceMetadata); - /** * Deletes {@link DataSource} corresponding to the DataSource name. * @@ -66,8 +61,8 @@ public interface DataSourceService { void deleteDataSource(String dataSourceName); /** - * Returns true {@link Boolean} if datasource with dataSourceName exists - * or else false {@link Boolean}. + * Returns true {@link Boolean} if datasource with dataSourceName exists or else false {@link + * Boolean}. * * @param dataSourceName name of the {@link DataSource}. */ diff --git a/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java b/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java index 5deb460961..9623102efb 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java +++ b/core/src/main/java/org/opensearch/sql/datasource/model/DataSource.java @@ -12,9 +12,7 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.storage.StorageEngine; -/** - * Each user configured datasource mapping to one instance of DataSource per JVM. - */ +/** Each user configured datasource mapping to one instance of DataSource per JVM. */ @Getter @RequiredArgsConstructor @EqualsAndHashCode @@ -24,7 +22,5 @@ public class DataSource { private final DataSourceType connectorType; - @EqualsAndHashCode.Exclude - private final StorageEngine storageEngine; - + @EqualsAndHashCode.Exclude private final StorageEngine storageEngine; } diff --git a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java index 7945f8aec3..8e5ff7e1a6 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java +++ b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java @@ -5,7 +5,6 @@ package org.opensearch.sql.datasource.model; - import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import com.fasterxml.jackson.annotation.JsonFormat; @@ -30,25 +29,25 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class DataSourceMetadata { - @JsonProperty - private String name; + @JsonProperty private String name; @JsonProperty @JsonFormat(with = JsonFormat.Feature.ACCEPT_CASE_INSENSITIVE_PROPERTIES) private DataSourceType connector; - @JsonProperty - private List allowedRoles; + @JsonProperty private List allowedRoles; - @JsonProperty - private Map properties; + @JsonProperty private Map properties; /** * Default OpenSearch {@link DataSourceMetadata}. Which is used to register default OpenSearch * {@link DataSource} to {@link DataSourceService}. 
*/ public static DataSourceMetadata defaultOpenSearchDataSourceMetadata() { - return new DataSourceMetadata(DEFAULT_DATASOURCE_NAME, - DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); + return new DataSourceMetadata( + DEFAULT_DATASOURCE_NAME, + DataSourceType.OPENSEARCH, + Collections.emptyList(), + ImmutableMap.of()); } } diff --git a/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java b/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java index 65ea187666..d11f6e4f9e 100644 --- a/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java +++ b/core/src/main/java/org/opensearch/sql/exception/ExpressionEvaluationException.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.exception; -/** - * Exception for Expression Evaluation. - */ +/** Exception for Expression Evaluation. */ public class ExpressionEvaluationException extends QueryEngineException { public ExpressionEvaluationException(String message) { super(message); diff --git a/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java b/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java index 9383bece57..57f75c2cb0 100644 --- a/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java +++ b/core/src/main/java/org/opensearch/sql/exception/NoCursorException.java @@ -6,8 +6,7 @@ package org.opensearch.sql.exception; /** - * This should be thrown on serialization of a PhysicalPlan tree if paging is finished. - * Processing of such exception should outcome of responding no cursor to the user. + * This should be thrown on serialization of a PhysicalPlan tree if paging is finished. Processing + * of such exception should outcome of responding no cursor to the user. */ -public class NoCursorException extends RuntimeException { -} +public class NoCursorException extends RuntimeException {} diff --git a/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java b/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java index ce90ecff5c..b3d13bef71 100644 --- a/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java +++ b/core/src/main/java/org/opensearch/sql/exception/QueryEngineException.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.exception; -/** - * Query analysis abstract exception. - */ +/** Query analysis abstract exception. */ public class QueryEngineException extends RuntimeException { public QueryEngineException(String message) { diff --git a/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java b/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java index 8673dbfc3c..6e0c184af8 100644 --- a/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java +++ b/core/src/main/java/org/opensearch/sql/exception/SemanticCheckException.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.exception; -/** - * Semantic Check Exception. - */ +/** Semantic Check Exception. 
*/ public class SemanticCheckException extends QueryEngineException { public SemanticCheckException(String message) { super(message); diff --git a/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java b/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java index 6ed8e02e5f..8117304571 100644 --- a/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java +++ b/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java @@ -5,8 +5,5 @@ package org.opensearch.sql.exception; -/** - * This should be thrown by V2 engine to support fallback scenario. - */ -public class UnsupportedCursorRequestException extends RuntimeException { -} +/** This should be thrown by V2 engine to support fallback scenario. */ +public class UnsupportedCursorRequestException extends RuntimeException {} diff --git a/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java b/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java index 8a3162068f..22d14972eb 100644 --- a/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java +++ b/core/src/main/java/org/opensearch/sql/executor/ExecutionContext.java @@ -9,12 +9,9 @@ import lombok.Getter; import org.opensearch.sql.storage.split.Split; -/** - * Execution context hold planning related information. - */ +/** Execution context hold planning related information. */ public class ExecutionContext { - @Getter - private final Optional split; + @Getter private final Optional split; public ExecutionContext(Split split) { this.split = Optional.of(split); diff --git a/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java b/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java index 9465da22c9..43b8ccb62e 100644 --- a/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java +++ b/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.executor; import java.util.List; @@ -17,39 +16,33 @@ import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * Execution engine that encapsulates execution details. - */ +/** Execution engine that encapsulates execution details. */ public interface ExecutionEngine { /** - * Execute physical plan and call back response listener. + * Execute physical plan and call back response listener.
* Todo. deprecated this interface after finalize {@link ExecutionContext}. * - * @param plan executable physical plan + * @param plan executable physical plan * @param listener response listener */ void execute(PhysicalPlan plan, ResponseListener listener); - /** - * Execute physical plan with {@link ExecutionContext} and call back response listener. - */ - void execute(PhysicalPlan plan, ExecutionContext context, - ResponseListener listener); + /** Execute physical plan with {@link ExecutionContext} and call back response listener. */ + void execute( + PhysicalPlan plan, ExecutionContext context, ResponseListener listener); /** - * Explain physical plan and call back response listener. The reason why this has to - * be part of execution engine interface is that the physical plan probably needs to - * be executed to get more info for profiling, such as actual execution time, rows fetched etc. + * Explain physical plan and call back response listener. The reason why this has to be part of + * execution engine interface is that the physical plan probably needs to be executed to get more + * info for profiling, such as actual execution time, rows fetched etc. * - * @param plan physical plan to explain + * @param plan physical plan to explain * @param listener response listener */ void explain(PhysicalPlan plan, ResponseListener listener); - /** - * Data class that encapsulates ExprValue. - */ + /** Data class that encapsulates ExprValue. */ @Data class QueryResponse { private final Schema schema; @@ -70,8 +63,8 @@ public static class Column { } /** - * Data class that encapsulates explain result. This can help decouple core engine - * from concrete explain response format. + * Data class that encapsulates explain result. This can help decouple core engine from concrete + * explain response format. */ @Data class ExplainResponse { @@ -86,5 +79,4 @@ class ExplainResponseNode { private Map description; private List children; } - } diff --git a/core/src/main/java/org/opensearch/sql/executor/Explain.java b/core/src/main/java/org/opensearch/sql/executor/Explain.java index 7c16e0b720..0f05b99383 100644 --- a/core/src/main/java/org/opensearch/sql/executor/Explain.java +++ b/core/src/main/java/org/opensearch/sql/executor/Explain.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.executor; import com.google.common.collect.ImmutableMap; @@ -35,11 +34,9 @@ import org.opensearch.sql.planner.physical.WindowOperator; import org.opensearch.sql.storage.TableScanOperator; -/** - * Visitor that explains a physical plan to JSON format. - */ +/** Visitor that explains a physical plan to JSON format. 
*/ public class Explain extends PhysicalPlanNodeVisitor - implements Function { + implements Function { @Override public ExplainResponse apply(PhysicalPlan plan) { @@ -48,109 +45,160 @@ public ExplainResponse apply(PhysicalPlan plan) { @Override public ExplainResponseNode visitProject(ProjectOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "fields", node.getProjectList().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("fields", node.getProjectList().toString()))); } @Override public ExplainResponseNode visitFilter(FilterOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "conditions", node.getConditions().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("conditions", node.getConditions().toString()))); } @Override public ExplainResponseNode visitSort(SortOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "sortList", describeSortList(node.getSortList())))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("sortList", describeSortList(node.getSortList())))); } @Override public ExplainResponseNode visitTableScan(TableScanOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "request", node.toString()))); + return explain( + node, + context, + explainNode -> explainNode.setDescription(ImmutableMap.of("request", node.toString()))); } @Override public ExplainResponseNode visitAggregation(AggregationOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "aggregators", node.getAggregatorList().toString(), - "groupBy", node.getGroupByExprList().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "aggregators", node.getAggregatorList().toString(), + "groupBy", node.getGroupByExprList().toString()))); } @Override public ExplainResponseNode visitWindow(WindowOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "function", node.getWindowFunction().toString(), - "definition", ImmutableMap.of( - "partitionBy", node.getWindowDefinition().getPartitionByList().toString(), - "sortList", describeSortList(node.getWindowDefinition().getSortList()))))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "function", node.getWindowFunction().toString(), + "definition", + ImmutableMap.of( + "partitionBy", + node.getWindowDefinition().getPartitionByList().toString(), + "sortList", + describeSortList(node.getWindowDefinition().getSortList()))))); } @Override public ExplainResponseNode visitRename(RenameOperator node, Object context) { Map renameMappingDescription = - node.getMapping() - .entrySet() - .stream() - .collect(Collectors.toMap( - e -> e.getKey().toString(), - e -> e.getValue().toString())); + node.getMapping().entrySet().stream() + .collect(Collectors.toMap(e -> e.getKey().toString(), e -> e.getValue().toString())); - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "mapping", renameMappingDescription))); + return explain( + 
node, + context, + explainNode -> + explainNode.setDescription(ImmutableMap.of("mapping", renameMappingDescription))); } @Override public ExplainResponseNode visitRemove(RemoveOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "removeList", node.getRemoveList().toString()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("removeList", node.getRemoveList().toString()))); } @Override public ExplainResponseNode visitEval(EvalOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "expressions", convertPairListToMap(node.getExpressionList())))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of("expressions", convertPairListToMap(node.getExpressionList())))); } @Override public ExplainResponseNode visitDedupe(DedupeOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "dedupeList", node.getDedupeList().toString(), - "allowedDuplication", node.getAllowedDuplication(), - "keepEmpty", node.getKeepEmpty(), - "consecutive", node.getConsecutive()))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "dedupeList", node.getDedupeList().toString(), + "allowedDuplication", node.getAllowedDuplication(), + "keepEmpty", node.getKeepEmpty(), + "consecutive", node.getConsecutive()))); } @Override public ExplainResponseNode visitRareTopN(RareTopNOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "commandType", node.getCommandType(), - "noOfResults", node.getNoOfResults(), - "fields", node.getFieldExprList().toString(), - "groupBy", node.getGroupByExprList().toString() - ))); + return explain( + node, + context, + explainNode -> + explainNode.setDescription( + ImmutableMap.of( + "commandType", node.getCommandType(), + "noOfResults", node.getNoOfResults(), + "fields", node.getFieldExprList().toString(), + "groupBy", node.getGroupByExprList().toString()))); } @Override public ExplainResponseNode visitValues(ValuesOperator node, Object context) { - return explain(node, context, explainNode -> explainNode.setDescription(ImmutableMap.of( - "values", node.getValues()))); + return explain( + node, + context, + explainNode -> explainNode.setDescription(ImmutableMap.of("values", node.getValues()))); } @Override public ExplainResponseNode visitLimit(LimitOperator node, Object context) { - return explain(node, context, explanNode -> explanNode.setDescription(ImmutableMap.of( - "limit", node.getLimit(), "offset", node.getOffset()))); + return explain( + node, + context, + explanNode -> + explanNode.setDescription( + ImmutableMap.of("limit", node.getLimit(), "offset", node.getOffset()))); } @Override public ExplainResponseNode visitNested(NestedOperator node, Object context) { - return explain(node, context, explanNode -> explanNode.setDescription(ImmutableMap.of( - "nested", node.getFields()))); + return explain( + node, + context, + explanNode -> explanNode.setDescription(ImmutableMap.of("nested", node.getFields()))); } - protected ExplainResponseNode explain(PhysicalPlan node, Object context, - Consumer doExplain) { + protected ExplainResponseNode explain( + PhysicalPlan node, Object context, Consumer doExplain) { ExplainResponseNode explainNode = new 
ExplainResponseNode(getOperatorName(node)); List children = new ArrayList<>(); @@ -169,19 +217,18 @@ private String getOperatorName(PhysicalPlan node) { private Map convertPairListToMap(List> pairs) { return pairs.stream() - .collect(Collectors.toMap( - p -> p.getLeft().toString(), - p -> p.getRight().toString())); + .collect(Collectors.toMap(p -> p.getLeft().toString(), p -> p.getRight().toString())); } private Map> describeSortList( List> sortList) { return sortList.stream() - .collect(Collectors.toMap( - p -> p.getRight().toString(), - p -> ImmutableMap.of( - "sortOrder", p.getLeft().getSortOrder().toString(), - "nullOrder", p.getLeft().getNullOrder().toString()))); + .collect( + Collectors.toMap( + p -> p.getRight().toString(), + p -> + ImmutableMap.of( + "sortOrder", p.getLeft().getSortOrder().toString(), + "nullOrder", p.getLeft().getNullOrder().toString()))); } - } diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryId.java b/core/src/main/java/org/opensearch/sql/executor/QueryId.java index 933cb5d82d..eea8166e2a 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryId.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryId.java @@ -12,18 +12,14 @@ import org.apache.commons.lang3.RandomStringUtils; import org.opensearch.sql.executor.execution.AbstractPlan; -/** - * Query id of {@link AbstractPlan}. - */ +/** Query id of {@link AbstractPlan}. */ public class QueryId { - /** - * Query id. - */ - @Getter - private final String queryId; + /** Query id. */ + @Getter private final String queryId; /** * Generate {@link QueryId}. + * * @return {@link QueryId}. */ public static QueryId queryId() { diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryManager.java b/core/src/main/java/org/opensearch/sql/executor/QueryManager.java index 5b41d7ce2e..44d6a1cd84 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryManager.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryManager.java @@ -11,13 +11,14 @@ import org.opensearch.sql.executor.execution.AbstractPlan; /** - * QueryManager is the high-level interface of core engine. - * Frontend submit {@link AbstractPlan} to QueryManager. + * QueryManager is the high-level interface of core engine. Frontend submit an {@link AbstractPlan} + * to QueryManager. */ public interface QueryManager { /** * Submit {@link AbstractPlan}. + * * @param queryPlan {@link AbstractPlan}. * @return {@link QueryId}. */ diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryService.java b/core/src/main/java/org/opensearch/sql/executor/QueryService.java index 94e7081920..3e939212bf 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryService.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryService.java @@ -18,9 +18,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * The low level interface of core engine. - */ +/** The low level interface of core engine. */ @RequiredArgsConstructor public class QueryService { @@ -31,14 +29,14 @@ public class QueryService { private final Planner planner; /** - * Execute the {@link UnresolvedPlan}, using {@link ResponseListener} to get response. + * Execute the {@link UnresolvedPlan}, using {@link ResponseListener} to get response.
* Todo. deprecated this interface after finalize {@link PlanContext}. * - * @param plan {@link UnresolvedPlan} + * @param plan {@link UnresolvedPlan} * @param listener {@link ResponseListener} */ - public void execute(UnresolvedPlan plan, - ResponseListener listener) { + public void execute( + UnresolvedPlan plan, ResponseListener listener) { try { executePlan(analyze(plan), PlanContext.emptyPlanContext(), listener); } catch (Exception e) { @@ -48,22 +46,24 @@ public void execute(UnresolvedPlan plan, /** * Execute the {@link UnresolvedPlan}, with {@link PlanContext} and using {@link ResponseListener} - * to get response. + * to get response.
* Todo. Pass split from PlanContext to ExecutionEngine in following PR. * * @param plan {@link LogicalPlan} * @param planContext {@link PlanContext} * @param listener {@link ResponseListener} */ - public void executePlan(LogicalPlan plan, - PlanContext planContext, - ResponseListener listener) { + public void executePlan( + LogicalPlan plan, + PlanContext planContext, + ResponseListener listener) { try { planContext .getSplit() .ifPresentOrElse( split -> executionEngine.execute(plan(plan), new ExecutionContext(split), listener), - () -> executionEngine.execute( + () -> + executionEngine.execute( plan(plan), ExecutionContext.emptyExecutionContext(), listener)); } catch (Exception e) { listener.onFailure(e); @@ -71,14 +71,14 @@ public void executePlan(LogicalPlan plan, } /** - * Explain the query in {@link UnresolvedPlan} using {@link ResponseListener} to - * get and format explain response. + * Explain the query in {@link UnresolvedPlan} using {@link ResponseListener} to get and format + * explain response. * * @param plan {@link UnresolvedPlan} * @param listener {@link ResponseListener} for explain response */ - public void explain(UnresolvedPlan plan, - ResponseListener listener) { + public void explain( + UnresolvedPlan plan, ResponseListener listener) { try { executionEngine.explain(plan(analyze(plan)), listener); } catch (Exception e) { @@ -86,16 +86,12 @@ public void explain(UnresolvedPlan plan, } } - /** - * Analyze {@link UnresolvedPlan}. - */ + /** Analyze {@link UnresolvedPlan}. */ public LogicalPlan analyze(UnresolvedPlan plan) { return analyzer.analyze(plan, new AnalysisContext()); } - /** - * Translate {@link LogicalPlan} to {@link PhysicalPlan}. - */ + /** Translate {@link LogicalPlan} to {@link PhysicalPlan}. */ public PhysicalPlan plan(LogicalPlan plan) { return planner.plan(plan); } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java index 1654293c04..23091777ce 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/AbstractPlan.java @@ -8,28 +8,20 @@ package org.opensearch.sql.executor.execution; - import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryId; -/** - * AbstractPlan represent the execution entity of the Statement. - */ +/** AbstractPlan represent the execution entity of the Statement. */ @RequiredArgsConstructor public abstract class AbstractPlan { - /** - * Uniq query id. - */ - @Getter - private final QueryId queryId; + /** Uniq query id. */ + @Getter private final QueryId queryId; - /** - * Start query execution. - */ + /** Start query execution. */ public abstract void execute(); /** diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java index 0ea5266084..103e0e1081 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/CommandPlan.java @@ -15,26 +15,25 @@ import org.opensearch.sql.executor.QueryService; /** - * Query plan which does not reflect a search query being executed. - * It contains a command or an action, for example, a DDL query. 
+ * Query plan which does not reflect a search query being executed. It contains a command or an + * action, for example, a DDL query. */ public class CommandPlan extends AbstractPlan { - /** - * The query plan ast. - */ + /** The query plan ast. */ protected final UnresolvedPlan plan; - /** - * Query service. - */ + /** Query service. */ protected final QueryService queryService; protected final ResponseListener listener; /** Constructor. */ - public CommandPlan(QueryId queryId, UnresolvedPlan plan, QueryService queryService, - ResponseListener listener) { + public CommandPlan( + QueryId queryId, + UnresolvedPlan plan, + QueryService queryService, + ResponseListener listener) { super(queryId); this.plan = plan; this.queryService = queryService; diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java index 8c784f82ed..7868a39001 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/ExplainPlan.java @@ -12,21 +12,18 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryId; -/** - * Explain plan. - */ +/** Explain plan. */ public class ExplainPlan extends AbstractPlan { private final AbstractPlan plan; private final ResponseListener explainListener; - /** - * Constructor. - */ - public ExplainPlan(QueryId queryId, - AbstractPlan plan, - ResponseListener explainListener) { + /** Constructor. */ + public ExplainPlan( + QueryId queryId, + AbstractPlan plan, + ResponseListener explainListener) { super(queryId); this.plan = plan; this.explainListener = explainListener; diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java index aeecf3e76f..0ebdb875f6 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlan.java @@ -17,19 +17,13 @@ import org.opensearch.sql.executor.QueryId; import org.opensearch.sql.executor.QueryService; -/** - * Query plan which includes a select query. - */ +/** Query plan which includes a select query. */ public class QueryPlan extends AbstractPlan { - /** - * The query plan ast. - */ + /** The query plan ast. */ protected final UnresolvedPlan plan; - /** - * Query service. - */ + /** Query service. 
*/ protected final QueryService queryService; protected final ResponseListener listener; @@ -75,8 +69,9 @@ public void execute() { @Override public void explain(ResponseListener listener) { if (pageSize.isPresent()) { - listener.onFailure(new NotImplementedException( - "`explain` feature for paginated requests is not implemented yet.")); + listener.onFailure( + new NotImplementedException( + "`explain` feature for paginated requests is not implemented yet.")); } else { queryService.explain(plan, listener); } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java index 3273eb3c18..52d7126e17 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java @@ -27,9 +27,7 @@ import org.opensearch.sql.executor.QueryService; import org.opensearch.sql.executor.pagination.CanPaginateVisitor; -/** - * QueryExecution Factory. - */ +/** QueryExecution Factory. */ @RequiredArgsConstructor public class QueryPlanFactory extends AbstractNodeVisitor< @@ -38,14 +36,12 @@ public class QueryPlanFactory Optional>, Optional>>> { - /** - * Query Service. - */ + /** Query Service. */ private final QueryService queryService; /** - * NO_CONSUMER_RESPONSE_LISTENER should never be called. It is only used as constructor - * parameter of {@link QueryPlan}. + * NO_CONSUMER_RESPONSE_LISTENER should never be called. It is only used as constructor parameter + * of {@link QueryPlan}. */ @VisibleForTesting protected static final ResponseListener @@ -64,9 +60,7 @@ public void onFailure(Exception e) { } }; - /** - * Create QueryExecution from Statement. - */ + /** Create QueryExecution from Statement. */ public AbstractPlan create( Statement statement, Optional> queryListener, @@ -74,12 +68,12 @@ public AbstractPlan create( return statement.accept(this, Pair.of(queryListener, explainListener)); } - /** - * Creates a QueryPlan from a cursor. - */ - public AbstractPlan create(String cursor, boolean isExplain, - ResponseListener queryResponseListener, - ResponseListener explainListener) { + /** Creates a QueryPlan from a cursor. */ + public AbstractPlan create( + String cursor, + boolean isExplain, + ResponseListener queryResponseListener, + ResponseListener explainListener) { QueryId queryId = QueryId.queryId(); var plan = new QueryPlan(queryId, new FetchCursor(cursor), queryService, queryResponseListener); return isExplain ? new ExplainPlan(queryId, plan, explainListener) : plan; @@ -89,27 +83,32 @@ boolean canConvertToCursor(UnresolvedPlan plan) { return plan.accept(new CanPaginateVisitor(), null); } - /** - * Creates a {@link CloseCursor} command on a cursor. - */ - public AbstractPlan createCloseCursor(String cursor, - ResponseListener queryResponseListener) { - return new CommandPlan(QueryId.queryId(), new CloseCursor().attach(new FetchCursor(cursor)), - queryService, queryResponseListener); + /** Creates a {@link CloseCursor} command on a cursor. 
*/ + public AbstractPlan createCloseCursor( + String cursor, ResponseListener queryResponseListener) { + return new CommandPlan( + QueryId.queryId(), + new CloseCursor().attach(new FetchCursor(cursor)), + queryService, + queryResponseListener); } @Override public AbstractPlan visitQuery( Query node, - Pair>, - Optional>> + Pair< + Optional>, + Optional>> context) { Preconditions.checkArgument( context.getLeft().isPresent(), "[BUG] query listener must be not null"); if (node.getFetchSize() > 0) { if (canConvertToCursor(node.getPlan())) { - return new QueryPlan(QueryId.queryId(), node.getPlan(), node.getFetchSize(), + return new QueryPlan( + QueryId.queryId(), + node.getPlan(), + node.getFetchSize(), queryService, context.getLeft().get()); } else { @@ -117,24 +116,24 @@ public AbstractPlan visitQuery( throw new UnsupportedCursorRequestException(); } } else { - return new QueryPlan(QueryId.queryId(), node.getPlan(), queryService, - context.getLeft().get()); + return new QueryPlan( + QueryId.queryId(), node.getPlan(), queryService, context.getLeft().get()); } } @Override public AbstractPlan visitExplain( Explain node, - Pair>, - Optional>> + Pair< + Optional>, + Optional>> context) { Preconditions.checkArgument( context.getRight().isPresent(), "[BUG] explain listener must be not null"); return new ExplainPlan( QueryId.queryId(), - create(node.getStatement(), - Optional.of(NO_CONSUMER_RESPONSE_LISTENER), Optional.empty()), + create(node.getStatement(), Optional.of(NO_CONSUMER_RESPONSE_LISTENER), Optional.empty()), context.getRight().get()); } } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java index 9bb37b064c..5adb9644e9 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/StreamingQueryPlan.java @@ -24,9 +24,7 @@ import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; import org.opensearch.sql.planner.logical.LogicalRelation; -/** - * Streaming Query Plan. - */ +/** Streaming Query Plan. */ public class StreamingQueryPlan extends QueryPlan { private static final Logger log = LogManager.getLogger(StreamingQueryPlan.class); @@ -35,14 +33,13 @@ public class StreamingQueryPlan extends QueryPlan { private MicroBatchStreamingExecution streamingExecution; - /** - * constructor. - */ - public StreamingQueryPlan(QueryId queryId, - UnresolvedPlan plan, - QueryService queryService, - ResponseListener listener, - ExecutionStrategy executionStrategy) { + /** constructor. */ + public StreamingQueryPlan( + QueryId queryId, + UnresolvedPlan plan, + QueryService queryService, + ResponseListener listener, + ExecutionStrategy executionStrategy) { super(queryId, plan, queryService, listener); this.executionStrategy = executionStrategy; @@ -70,15 +67,13 @@ public void execute() { } interface ExecutionStrategy { - /** - * execute task. - */ + /** execute task. */ void execute(Runnable task) throws InterruptedException; } /** - * execute task with fixed interval. - * if task run time < interval, trigger next task on next interval. + * execute task with fixed interval.
+ * if task run time < interval, trigger next task on next interval.
* if task run time >= interval, trigger next task immediately. */ @RequiredArgsConstructor diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java b/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java index e304c132bd..9f14ba1e5d 100644 --- a/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java @@ -41,18 +41,26 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * Use this unresolved plan visitor to check if a plan can be serialized by PaginatedPlanCache. - * If
<pre>plan.accept(new CanPaginateVisitor(...))</pre> returns true,
- * then PaginatedPlanCache.convertToCursor will succeed. Otherwise, it will fail.
- * The purpose of this visitor is to activate legacy engine fallback mechanism.
- * Currently, V2 engine does not support queries with:
- * - aggregation (GROUP BY clause or aggregation functions like min/max)
- * - in memory aggregation (window function)
- * - LIMIT/OFFSET clause(s)
- * - without FROM clause
- * - JOIN
- * - a subquery
- * V2 also requires that the table being queried should be an OpenSearch index.
+ * Use this unresolved plan visitor to check if a plan can be serialized by PaginatedPlanCache.<br>
+ * If
+ *
+ * <pre>plan.accept(new CanPaginateVisitor(...))</pre>
+ *
+ * returns true, PaginatedPlanCache.convertToCursor will succeed.<br>
+ * Otherwise, it will fail.<br>
+ * The purpose of this visitor is to activate legacy engine fallback mechanism.<br>
+ * Currently, V2 engine does not support queries with:<br>
+ *
+ * <ul>
+ *   <li>aggregation (GROUP BY clause or aggregation functions like min/max)
+ *   <li>in memory aggregation (window function)
+ *   <li>LIMIT/OFFSET clause(s)
+ *   <li>without FROM clause
+ *   <li>JOIN
+ *   <li>a subquery
+ * </ul>
+ *
+ * V2 also requires that the table being queried should be an OpenSearch index.<br>
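For context, the QueryPlanFactory hunk earlier in this patch consumes this visitor to decide whether a paging request can be honored; condensed into a sketch (the wrapper class, method placement, and parameter names are illustrative assumptions, not part of the patch), that gating flow is roughly::

    import org.opensearch.sql.ast.tree.UnresolvedPlan;
    import org.opensearch.sql.exception.UnsupportedCursorRequestException;
    import org.opensearch.sql.executor.pagination.CanPaginateVisitor;

    class PaginationGateSketch {
      /** True when every node of the unresolved plan accepts CanPaginateVisitor. */
      boolean canConvertToCursor(UnresolvedPlan plan) {
        return plan.accept(new CanPaginateVisitor(), null);
      }

      /** When paging is requested but unsupported, throw to trigger the legacy-engine fallback. */
      void requirePaginationSupport(UnresolvedPlan plan, int fetchSize) {
        if (fetchSize > 0 && !canConvertToCursor(plan)) {
          throw new UnsupportedCursorRequestException();
        }
      }
    }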
* See PaginatedPlanCache.canConvertToCursor for usage. */ public class CanPaginateVisitor extends AbstractNodeVisitor { @@ -80,8 +88,8 @@ protected Boolean canPaginate(Node node, Object context) { // https://github.com/opensearch-project/sql/issues/1471 @Override public Boolean visitSort(Sort node, Object context) { - return node.getSortList().stream().allMatch(f -> f.getField() instanceof QualifiedName - && visitField(f, context)) + return node.getSortList().stream() + .allMatch(f -> f.getField() instanceof QualifiedName && visitField(f, context)) && canPaginate(node, context); } @@ -116,8 +124,8 @@ public Boolean visitLiteral(Literal node, Object context) { @Override public Boolean visitField(Field node, Object context) { - return canPaginate(node, context) && node.getFieldArgs().stream() - .allMatch(n -> n.accept(this, context)); + return canPaginate(node, context) + && node.getFieldArgs().stream().allMatch(n -> n.accept(this, context)); } @Override @@ -192,8 +200,8 @@ public Boolean visitFunction(Function node, Object context) { @Override public Boolean visitIn(In node, Object context) { - return canPaginate(node, context) && node.getValueList().stream() - .allMatch(n -> n.accept(this, context)); + return canPaginate(node, context) + && node.getValueList().stream().allMatch(n -> n.accept(this, context)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java b/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java index bb320f5c67..59a14a2d72 100644 --- a/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java @@ -14,8 +14,7 @@ public class Cursor { public static final Cursor None = new Cursor(null); - @Getter - private final String data; + @Getter private final String data; public String toString() { return data; diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java b/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java index 07cf174d73..40a6cdfa84 100644 --- a/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/PlanSerializer.java @@ -24,8 +24,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * This class is entry point to paged requests. It is responsible to cursor serialization - * and deserialization. + * This class is entry point to paged requests. It is responsible to cursor serialization and + * deserialization. */ @RequiredArgsConstructor public class PlanSerializer { @@ -33,14 +33,11 @@ public class PlanSerializer { private final StorageEngine engine; - - /** - * Converts a physical plan tree to a cursor. - */ + /** Converts a physical plan tree to a cursor. */ public Cursor convertToCursor(PhysicalPlan plan) { try { - return new Cursor(CURSOR_PREFIX - + serialize(((SerializablePlan) plan).getPlanForSerialization())); + return new Cursor( + CURSOR_PREFIX + serialize(((SerializablePlan) plan).getPlanForSerialization())); // ClassCastException thrown when a plan in the tree doesn't implement SerializablePlan } catch (NotSerializableException | ClassCastException | NoCursorException e) { return Cursor.None; @@ -49,6 +46,7 @@ public Cursor convertToCursor(PhysicalPlan plan) { /** * Serializes and compresses the object. + * * @param object The object. * @return Encoded binary data. 
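Taken together, convertToCursor, serialize, deserialize, and convertToPlan in this PlanSerializer hunk form the cursor round trip; a minimal sketch of that round trip, assuming placeholder ``engine`` and ``plan`` arguments and a hypothetical wrapper class, is::

    import org.opensearch.sql.executor.pagination.Cursor;
    import org.opensearch.sql.executor.pagination.PlanSerializer;
    import org.opensearch.sql.planner.physical.PhysicalPlan;
    import org.opensearch.sql.storage.StorageEngine;

    class CursorRoundTripSketch {
      PhysicalPlan roundTrip(StorageEngine engine, PhysicalPlan plan) {
        PlanSerializer serializer = new PlanSerializer(engine);
        // Serialization GZIP-compresses the plan tree behind the cursor prefix;
        // plans that cannot be serialized come back as Cursor.None.
        Cursor cursor = serializer.convertToCursor(plan);
        if (cursor == Cursor.None) {
          return plan; // paging not possible, caller keeps the original plan
        }
        // The encoded string can later be restored into a physical plan tree.
        return serializer.convertToPlan(cursor.toString());
      }
    }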
*/ @@ -61,9 +59,12 @@ protected String serialize(Serializable object) throws NotSerializableException ByteArrayOutputStream out = new ByteArrayOutputStream(); // GZIP provides 35-45%, lzma from apache commons-compress has few % better compression - GZIPOutputStream gzip = new GZIPOutputStream(out) { { - this.def.setLevel(Deflater.BEST_COMPRESSION); - } }; + GZIPOutputStream gzip = + new GZIPOutputStream(out) { + { + this.def.setLevel(Deflater.BEST_COMPRESSION); + } + }; gzip.write(output.toByteArray()); gzip.close(); @@ -77,24 +78,23 @@ protected String serialize(Serializable object) throws NotSerializableException /** * Decompresses and deserializes the binary data. + * * @param code Encoded binary data. * @return An object. */ protected Serializable deserialize(String code) { try { - GZIPInputStream gzip = new GZIPInputStream( - new ByteArrayInputStream(HashCode.fromString(code).asBytes())); - ObjectInputStream objectInput = new CursorDeserializationStream( - new ByteArrayInputStream(gzip.readAllBytes())); + GZIPInputStream gzip = + new GZIPInputStream(new ByteArrayInputStream(HashCode.fromString(code).asBytes())); + ObjectInputStream objectInput = + new CursorDeserializationStream(new ByteArrayInputStream(gzip.readAllBytes())); return (Serializable) objectInput.readObject(); } catch (Exception e) { throw new IllegalStateException("Failed to deserialize object", e); } } - /** - * Converts a cursor to a physical plan tree. - */ + /** Converts a cursor to a physical plan tree. */ public PhysicalPlan convertToPlan(String cursor) { if (!cursor.startsWith(CURSOR_PREFIX)) { throw new UnsupportedOperationException("Unsupported cursor"); diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java b/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java index cd7d7dae5a..ab03c4fb53 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/Batch.java @@ -8,9 +8,7 @@ import lombok.Data; import org.opensearch.sql.storage.split.Split; -/** - * A batch of streaming execution. - */ +/** A batch of streaming execution. */ @Data public class Batch { private final Split split; diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java b/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java index c31ed18c57..4e05484f15 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecution.java @@ -20,9 +20,7 @@ import org.opensearch.sql.planner.PlanContext; import org.opensearch.sql.planner.logical.LogicalPlan; -/** - * Micro batch streaming execution. - */ +/** Micro batch streaming execution. */ public class MicroBatchStreamingExecution { private static final Logger log = LogManager.getLogger(MicroBatchStreamingExecution.class); @@ -46,9 +44,7 @@ public class MicroBatchStreamingExecution { /** keep track the latest commit batchId. */ private final MetadataLog committedLog; - /** - * Constructor. - */ + /** Constructor. */ public MicroBatchStreamingExecution( StreamingSource source, LogicalPlan batchPlan, @@ -63,9 +59,7 @@ public MicroBatchStreamingExecution( this.committedLog = committedLog; } - /** - * Pull the {@link Batch} from {@link StreamingSource} and execute the {@link Batch}. - */ + /** Pull the {@link Batch} from {@link StreamingSource} and execute the {@link Batch}. 
*/ public void execute() { Long latestBatchId = offsetLog.getLatest().map(Pair::getKey).orElse(INITIAL_LATEST_BATCH_ID); Long latestCommittedBatchId = diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java b/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java index 00f040e437..27960da84d 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/Offset.java @@ -7,9 +7,7 @@ import lombok.Data; -/** - * Offset. - */ +/** Offset. */ @Data public class Offset { diff --git a/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java b/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java index ebd3fa714b..2b45a45d9a 100644 --- a/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java +++ b/core/src/main/java/org/opensearch/sql/executor/streaming/StreamingSource.java @@ -7,9 +7,7 @@ import java.util.Optional; -/** - * Streaming source. - */ +/** Streaming source. */ public interface StreamingSource { /** * Get current {@link Offset} of stream data. diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java index 3f1897e483..4341668b69 100644 --- a/core/src/main/java/org/opensearch/sql/expression/DSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import java.util.Arrays; @@ -29,8 +28,7 @@ public class DSL { - private DSL() { - } + private DSL() {} public static LiteralExpression literal(Byte value) { return new LiteralExpression(ExprValueUtils.byteValue(value)); @@ -68,9 +66,7 @@ public static LiteralExpression literal(ExprValue value) { return new LiteralExpression(value); } - /** - * Wrap a number to {@link LiteralExpression}. - */ + /** Wrap a number to {@link LiteralExpression}. */ public static LiteralExpression literal(Number value) { if (value instanceof Integer) { return new LiteralExpression(ExprValueUtils.integerValue(value.intValue())); @@ -88,21 +84,21 @@ public static ReferenceExpression ref(String ref, ExprType type) { } /** - * Wrap a named expression if not yet. The intent is that different languages may use - * Alias or not when building AST. This caused either named or unnamed expression - * is resolved by analyzer. To make unnamed expression acceptable for logical project, - * it is required to wrap it by named expression here before passing to logical project. + * Wrap a named expression if not yet. The intent is that different languages may use Alias or not + * when building AST. This caused either named or unnamed expression is resolved by analyzer. To + * make unnamed expression acceptable for logical project, it is required to wrap it by named + * expression here before passing to logical project. * - * @param expression expression - * @return expression if named already or expression wrapped by named expression. + * @param expression expression + * @return expression if named already or expression wrapped by named expression. 
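As a usage note for the DSL factory methods being reformatted in this hunk, a minimal sketch (the wrapper class is hypothetical, and evaluation assumes the built-in function repository is initialized as usual) is::

    import org.opensearch.sql.data.model.ExprValue;
    import org.opensearch.sql.data.type.ExprCoreType;
    import org.opensearch.sql.expression.DSL;
    import org.opensearch.sql.expression.NamedExpression;

    class DslUsageSketch {
      ExprValue sqrtOfFour() {
        // literal(...) wraps a constant; sqrt(...) compiles the built-in SQRT function.
        return DSL.sqrt(DSL.literal(4)).valueOf();
      }

      NamedExpression projectAge() {
        // named(...) wraps an unnamed expression so logical project can accept it.
        return DSL.named(DSL.ref("age", ExprCoreType.INTEGER));
      }
    }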
*/ public static NamedExpression named(Expression expression) { if (expression instanceof NamedExpression) { return (NamedExpression) expression; } if (expression instanceof ParseExpression) { - return named(((ParseExpression) expression).getIdentifier().valueOf().stringValue(), - expression); + return named( + ((ParseExpression) expression).getIdentifier().valueOf().stringValue(), expression); } return named(expression.toString(), expression); } @@ -127,18 +123,18 @@ public static NamedArgumentExpression namedArgument(String name, String value) { return namedArgument(name, literal(value)); } - public static GrokExpression grok(Expression sourceField, Expression pattern, - Expression identifier) { + public static GrokExpression grok( + Expression sourceField, Expression pattern, Expression identifier) { return new GrokExpression(sourceField, pattern, identifier); } - public static RegexExpression regex(Expression sourceField, Expression pattern, - Expression identifier) { + public static RegexExpression regex( + Expression sourceField, Expression pattern, Expression identifier) { return new RegexExpression(sourceField, pattern, identifier); } - public static PatternsExpression patterns(Expression sourceField, Expression pattern, - Expression identifier) { + public static PatternsExpression patterns( + Expression sourceField, Expression pattern, Expression identifier) { return new PatternsExpression(sourceField, pattern, identifier); } @@ -268,7 +264,6 @@ public static FunctionExpression signum(Expression... expressions) { public static FunctionExpression sinh(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.SINH, expressions); - } public static FunctionExpression sqrt(Expression... expressions) { @@ -364,8 +359,7 @@ public static FunctionExpression dayname(Expression... expressions) { } public static FunctionExpression dayofmonth( - FunctionProperties functionProperties, - Expression... expressions) { + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.DAYOFMONTH, expressions); } @@ -379,8 +373,7 @@ public static FunctionExpression dayofyear(Expression... expressions) { } public static FunctionExpression day_of_month( - FunctionProperties functionProperties, - Expression... expressions) { + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.DAY_OF_MONTH, expressions); } @@ -394,8 +387,8 @@ public static FunctionExpression day_of_week( return compile(functionProperties, BuiltinFunctionName.DAY_OF_WEEK, expressions); } - public static FunctionExpression extract(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression extract( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.EXTRACT, expressions); } @@ -419,8 +412,8 @@ public static FunctionExpression hour_of_day(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.HOUR_OF_DAY, expressions); } - public static FunctionExpression last_day(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression last_day( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.LAST_DAY, expressions); } @@ -477,14 +470,13 @@ public static FunctionExpression timestamp(Expression... 
expressions) { return timestamp(FunctionProperties.None, expressions); } - public static FunctionExpression timestamp(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression timestamp( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIMESTAMP, expressions); } public static FunctionExpression date_format( - FunctionProperties functionProperties, - Expression... expressions) { + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.DATE_FORMAT, expressions); } @@ -492,8 +484,8 @@ public static FunctionExpression to_days(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.TO_DAYS, expressions); } - public static FunctionExpression to_seconds(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression to_seconds( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TO_SECONDS, expressions); } @@ -506,8 +498,8 @@ public static FunctionExpression week( return compile(functionProperties, BuiltinFunctionName.WEEK, expressions); } - public static FunctionExpression weekday(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression weekday( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.WEEKDAY, expressions); } @@ -530,13 +522,13 @@ public static FunctionExpression yearweek( return compile(functionProperties, BuiltinFunctionName.YEARWEEK, expressions); } - public static FunctionExpression str_to_date(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression str_to_date( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.STR_TO_DATE, expressions); } - + public static FunctionExpression sec_to_time(Expression... expressions) { - return compile(FunctionProperties.None, BuiltinFunctionName.SEC_TO_TIME, expressions); + return compile(FunctionProperties.None, BuiltinFunctionName.SEC_TO_TIME, expressions); } public static FunctionExpression substr(Expression... expressions) { @@ -771,8 +763,7 @@ public static FunctionExpression iffunction(Expression... expressions) { return compile(FunctionProperties.None, BuiltinFunctionName.IF, expressions); } - public static Expression cases(Expression defaultResult, - WhenClause... whenClauses) { + public static Expression cases(Expression defaultResult, WhenClause... whenClauses) { return new CaseClause(Arrays.asList(whenClauses), defaultResult); } @@ -884,53 +875,52 @@ public static FunctionExpression score_query(Expression... args) { return compile(FunctionProperties.None, BuiltinFunctionName.SCORE_QUERY, args); } - public static FunctionExpression now(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression now(FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.NOW, args); } - public static FunctionExpression current_timestamp(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression current_timestamp( + FunctionProperties functionProperties, Expression... 
args) { return compile(functionProperties, BuiltinFunctionName.CURRENT_TIMESTAMP, args); } - public static FunctionExpression localtimestamp(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression localtimestamp( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.LOCALTIMESTAMP, args); } - public static FunctionExpression localtime(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression localtime( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.LOCALTIME, args); } - public static FunctionExpression sysdate(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression sysdate( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.SYSDATE, args); } - public static FunctionExpression curtime(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression curtime( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURTIME, args); } - public static FunctionExpression current_time(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression current_time( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURRENT_TIME, args); } - public static FunctionExpression curdate(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression curdate( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURDATE, args); } - public static FunctionExpression current_date(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression current_date( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.CURRENT_DATE, args); } - public static FunctionExpression time_format(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression time_format( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIME_FORMAT, expressions); } @@ -938,38 +928,36 @@ public static FunctionExpression timestampadd(Expression... expressions) { return timestampadd(FunctionProperties.None, expressions); } - public static FunctionExpression timestampadd(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression timestampadd( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIMESTAMPADD, expressions); } - public static FunctionExpression timestampdiff(FunctionProperties functionProperties, - Expression... expressions) { + public static FunctionExpression timestampdiff( + FunctionProperties functionProperties, Expression... expressions) { return compile(functionProperties, BuiltinFunctionName.TIMESTAMPDIFF, expressions); } - - public static FunctionExpression utc_date(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression utc_date( + FunctionProperties functionProperties, Expression... 
args) { return compile(functionProperties, BuiltinFunctionName.UTC_DATE, args); } - public static FunctionExpression utc_time(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression utc_time( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.UTC_TIME, args); } - public static FunctionExpression utc_timestamp(FunctionProperties functionProperties, - Expression... args) { + public static FunctionExpression utc_timestamp( + FunctionProperties functionProperties, Expression... args) { return compile(functionProperties, BuiltinFunctionName.UTC_TIMESTAMP, args); - } @SuppressWarnings("unchecked") - private static - T compile(FunctionProperties functionProperties, - BuiltinFunctionName bfn, Expression... args) { - return (T) BuiltinFunctionRepository.getInstance().compile(functionProperties, - bfn.getName(), Arrays.asList(args)); + private static T compile( + FunctionProperties functionProperties, BuiltinFunctionName bfn, Expression... args) { + return (T) + BuiltinFunctionRepository.getInstance() + .compile(functionProperties, bfn.getName(), Arrays.asList(args)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/Expression.java b/core/src/main/java/org/opensearch/sql/expression/Expression.java index 25a8173efa..e4a5f908e5 100644 --- a/core/src/main/java/org/opensearch/sql/expression/Expression.java +++ b/core/src/main/java/org/opensearch/sql/expression/Expression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import java.io.Serializable; @@ -11,36 +10,28 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.env.Environment; -/** - * The definition of the resolved expression. - */ +/** The definition of the resolved expression. */ public interface Expression extends Serializable { - /** - * Evaluate the value of expression that does not depend on value environment. - */ + /** Evaluate the value of expression that does not depend on value environment. */ default ExprValue valueOf() { return valueOf(null); } - /** - * Evaluate the value of expression in the value environment. - */ + /** Evaluate the value of expression in the value environment. */ ExprValue valueOf(Environment valueEnv); - /** - * The type of the expression. - */ + /** The type of the expression. */ ExprType type(); /** * Accept a visitor to visit current expression node. - * @param visitor visitor - * @param context context - * @param result type - * @param context type - * @return result accumulated by visitor when visiting + * + * @param visitor visitor + * @param context context + * @param result type + * @param context type + * @return result accumulated by visitor when visiting */ T accept(ExpressionNodeVisitor visitor, C context); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java b/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java index e3d4e38674..17dc7f5cd7 100644 --- a/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import org.opensearch.sql.expression.aggregation.Aggregator; @@ -15,8 +14,9 @@ /** * Abstract visitor for expression tree nodes. - * @param type of return value to accumulate when visiting. - * @param type of context. 
+ * + * @param type of return value to accumulate when visiting. + * @param type of context. */ public abstract class ExpressionNodeVisitor { @@ -26,9 +26,10 @@ public T visitNode(Expression node, C context) { /** * Visit children nodes in function arguments. - * @param node function node - * @param context context - * @return result + * + * @param node function node + * @param context context + * @return result */ public T visitChildren(FunctionImplementation node, C context) { T result = defaultResult(); @@ -81,10 +82,13 @@ public T visitNamedAggregator(NamedAggregator node, C context) { } /** - * Call visitFunction() by default rather than visitChildren(). - * This makes CASE/WHEN able to be handled: - * 1) by visitFunction() if not overwritten: ex. FilterQueryBuilder - * 2) by visitCase/When() otherwise if any special logic: ex. ExprReferenceOptimizer + * Call visitFunction() by default rather than visitChildren(). This makes CASE/WHEN able to be + * handled: + * + *
+ *
+ *   1. by visitFunction() if not overwritten: ex. FilterQueryBuilder
+ *   2. by visitCase/When() otherwise if any special logic: ex. ExprReferenceOptimizer
+ *
*/ public T visitCase(CaseClause node, C context) { return visitFunction(node, context); diff --git a/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java b/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java index 2a695f26e6..b67eb38c00 100644 --- a/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/FunctionExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import java.util.List; @@ -14,22 +13,17 @@ import org.opensearch.sql.expression.function.FunctionImplementation; import org.opensearch.sql.expression.function.FunctionName; -/** - * Function Expression. - */ +/** Function Expression. */ @EqualsAndHashCode @RequiredArgsConstructor @ToString public abstract class FunctionExpression implements Expression, FunctionImplementation { - @Getter - private final FunctionName functionName; + @Getter private final FunctionName functionName; - @Getter - private final List arguments; + @Getter private final List arguments; @Override public T accept(ExpressionNodeVisitor visitor, C context) { return visitor.visitFunction(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java b/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java index 804c38a6f7..79cc07f048 100644 --- a/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/HighlightExpression.java @@ -20,9 +20,7 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * Highlight Expression. - */ +/** Highlight Expression. */ @Getter public class HighlightExpression extends FunctionExpression { private final Expression highlightField; @@ -30,17 +28,19 @@ public class HighlightExpression extends FunctionExpression { /** * HighlightExpression Constructor. + * * @param highlightField : Highlight field for expression. */ public HighlightExpression(Expression highlightField) { super(BuiltinFunctionName.HIGHLIGHT.getName(), List.of(highlightField)); this.highlightField = highlightField; - this.type = this.highlightField.toString().contains("*") - ? ExprCoreType.STRUCT : ExprCoreType.ARRAY; + this.type = + this.highlightField.toString().contains("*") ? ExprCoreType.STRUCT : ExprCoreType.ARRAY; } /** * Return collection value matching highlight field. + * * @param valueEnv : Dataset to parse value from. * @return : collection value of highlight fields. */ @@ -57,15 +57,15 @@ public ExprValue valueOf(Environment valueEnv) { // used in conjunction with other highlight calls, we need to ensure // only wildcard regex matching is mapped to wildcard call. 
if (this.type == ExprCoreType.STRUCT && value.type() == ExprCoreType.STRUCT) { - value = new ExprTupleValue( - new LinkedHashMap(value.tupleValue() - .entrySet() - .stream() - .filter(s -> matchesHighlightRegex(s.getKey(), - StringUtils.unquoteText(highlightField.toString()))) - .collect(Collectors.toMap( - e -> e.getKey(), - e -> e.getValue())))); + value = + new ExprTupleValue( + new LinkedHashMap( + value.tupleValue().entrySet().stream() + .filter( + s -> + matchesHighlightRegex( + s.getKey(), StringUtils.unquoteText(highlightField.toString()))) + .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())))); if (value.tupleValue().isEmpty()) { value = ExprValueUtils.missingValue(); } @@ -76,6 +76,7 @@ public ExprValue valueOf(Environment valueEnv) { /** * Get type for HighlightExpression. + * * @return : Expression type. */ @Override @@ -90,6 +91,7 @@ public T accept(ExpressionNodeVisitor visitor, C context) { /** * Check if field matches the wildcard pattern used in highlight query. + * * @param field Highlight selected field for query * @param pattern Wildcard regex to match field against * @return True if field matches wildcard pattern diff --git a/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java b/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java index adb8e197d1..eba03e8430 100644 --- a/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/LiteralExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import lombok.EqualsAndHashCode; @@ -12,9 +11,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.env.Environment; -/** - * Literal Expression. - */ +/** Literal Expression. */ @EqualsAndHashCode @RequiredArgsConstructor public class LiteralExpression implements Expression { diff --git a/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java b/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java index 0f4601f1bf..c3ce60bd4a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/NamedArgumentExpression.java @@ -13,9 +13,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.env.Environment; -/** - * Named argument expression that represents function argument with name. - */ +/** Named argument expression that represents function argument with name. */ @RequiredArgsConstructor @Getter @EqualsAndHashCode diff --git a/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java b/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java index 26996eb93d..03118311a9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/NamedExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import com.google.common.base.Strings; @@ -16,8 +15,8 @@ import org.opensearch.sql.expression.env.Environment; /** - * Named expression that represents expression with name. - * Please see more details in associated unresolved expression operator + * Named expression that represents expression with name.
+ * Please see more details in associated unresolved expression operator
* {@link org.opensearch.sql.ast.expression.Alias}. */ @AllArgsConstructor @@ -26,19 +25,13 @@ @RequiredArgsConstructor public class NamedExpression implements Expression { - /** - * Expression name. - */ + /** Expression name. */ private final String name; - /** - * Expression that being named. - */ + /** Expression that being named. */ private final Expression delegated; - /** - * Optional alias. - */ + /** Optional alias. */ private String alias; @Override @@ -53,7 +46,8 @@ public ExprType type() { /** * Get expression name using name or its alias (if it's present). - * @return expression name + * + * @return expression name */ public String getNameOrAlias() { return Strings.isNullOrEmpty(alias) ? name : alias; @@ -68,5 +62,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return getNameOrAlias(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java b/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java index 3c5b2af23c..eb510e3b8a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/ReferenceExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression; import static org.opensearch.sql.utils.ExpressionUtils.PATH_SEP; @@ -22,16 +21,15 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ReferenceExpression implements Expression { - @Getter - private final String attr; + @Getter private final String attr; - @Getter - private final List paths; + @Getter private final List paths; private final ExprType type; /** * Constructor of ReferenceExpression. + * * @param ref the field name. e.g. addr.state/addr. * @param type type. */ @@ -63,6 +61,7 @@ public String toString() { } /** + *
    * Resolve the ExprValue from {@link ExprTupleValue} using paths.
    * Considering the following sample data.
    * {
@@ -95,6 +94,7 @@ public String toString() {
    *
    * @param value {@link ExprTupleValue}.
    * @return {@link ExprTupleValue}.
+   * 
*/ public ExprValue resolve(ExprTupleValue value) { return resolve(value, paths); diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java index 345c6c00dd..478e3caf54 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregationState.java @@ -3,18 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** - * Maintain the state when {@link Aggregator} iterate on the {@link BindingTuple}. - */ +/** Maintain the state when {@link Aggregator} iterate on the {@link BindingTuple}. */ public interface AggregationState { - /** - * Get {@link ExprValue} result. - */ + /** Get {@link ExprValue} result. */ ExprValue result(); } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java index a122ea6540..a2a3ce76c3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/Aggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import java.util.List; @@ -26,36 +25,34 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * Aggregator which will iterate on the {@link BindingTuple}s to aggregate the result. - * The Aggregator is not well fit into Expression, because it has side effect. - * But we still want to make it implement {@link Expression} interface to make - * {@link ExpressionAnalyzer} easier. + * Aggregator which will iterate on the {@link BindingTuple}s to aggregate the result. The + * Aggregator is not well fit into Expression, because it has side effect. But we still want to make + * it implement {@link Expression} interface to make {@link ExpressionAnalyzer} easier. */ @EqualsAndHashCode @RequiredArgsConstructor public abstract class Aggregator implements FunctionImplementation, Expression { - @Getter - private final FunctionName functionName; - @Getter - private final List arguments; + @Getter private final FunctionName functionName; + @Getter private final List arguments; protected final ExprCoreType returnType; + @Setter @Getter @Accessors(fluent = true) protected Expression condition; + @Setter @Getter @Accessors(fluent = true) protected Boolean distinct = false; - /** - * Create an {@link AggregationState} which will be used for aggregation. - */ + /** Create an {@link AggregationState} which will be used for aggregation. */ public abstract S create(); /** * Iterate on {@link ExprValue}. + * * @param value {@link ExprValue} * @param state {@link AggregationState} * @return {@link AggregationState} @@ -63,9 +60,9 @@ public abstract class Aggregator protected abstract S iterate(ExprValue value, S state); /** - * Let the aggregator iterate on the {@link BindingTuple} - * To filter out ExprValues that are missing, null or cannot satisfy {@link #condition} - * Before the specific aggregator iterating ExprValue in the tuple. 
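// Illustrative sketch only (not part of the patch above): the Aggregator javadoc reformatted here
// describes a create() -> iterate(value, state) -> result() contract that skips null or
// filtered-out values. A stand-alone sum over plain Doubles, with hypothetical names and an
// optional filter predicate standing in for the aggregation FILTER condition, might look like:
import java.util.List;
import java.util.function.Predicate;

class SumState {
  private double total = 0d;

  void add(double value) {
    total += value;
  }

  double result() {
    return total;
  }
}

class SumAggregatorSketch {
  // Optional filter; null means "accept every value".
  private final Predicate<Double> condition;

  SumAggregatorSketch(Predicate<Double> condition) {
    this.condition = condition;
  }

  SumState create() {
    return new SumState();
  }

  // Skip nulls and values rejected by the filter before touching the state, mirroring the
  // "filter out missing/null or unsatisfied condition" step described in the javadoc.
  SumState iterate(Double value, SumState state) {
    if (value == null || (condition != null && !condition.test(value))) {
      return state;
    }
    state.add(value);
    return state;
  }

  double aggregate(List<Double> values) {
    SumState state = create();
    for (Double value : values) {
      state = iterate(value, state);
    }
    return state.result();
  }
}
// e.g. new SumAggregatorSketch(v -> v > 0).aggregate(List.of(1.0, -2.0, 3.0)) returns 4.0.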
+ * Let the aggregator iterate on the {@link BindingTuple} To filter out ExprValues that are + * missing, null or cannot satisfy {@link #condition} Before the specific aggregator iterating + * ExprValue in the tuple. * * @param tuple {@link BindingTuple} * @param state {@link AggregationState} @@ -95,14 +92,11 @@ public T accept(ExpressionNodeVisitor visitor, C context) { return visitor.visitAggregator(this, context); } - /** - * Util method to get value of condition in aggregation filter. - */ + /** Util method to get value of condition in aggregation filter. */ public boolean conditionValue(BindingTuple tuple) { if (condition == null) { return true; } return ExprValueUtils.getBooleanValue(condition.valueOf(tuple)); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java index a24eeca1c1..4a1d4d309b 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AggregatorFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -35,12 +34,11 @@ import org.opensearch.sql.expression.function.FunctionSignature; /** - * The definition of aggregator function - * avg, Accepts two numbers and produces a number. - * sum, Accepts two numbers and produces a number. - * max, Accepts two numbers and produces a number. - * min, Accepts two numbers and produces a number. - * count, Accepts two numbers and produces a number. + * The definition of aggregator functions avg, sum, min, max and + * count.
+ * All of them accept a list of numbers and produce a number. avg, min and + * max also accept datetime types.
+ * count accepts values of all types. */ @UtilityClass public class AggregatorFunction { @@ -67,26 +65,37 @@ private static DefaultFunctionResolver avg() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new AvgAggregator(arguments, DOUBLE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new AvgAggregator(arguments, DATE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATETIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATETIME)), (functionProperties, arguments) -> new AvgAggregator(arguments, DATETIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new AvgAggregator(arguments, TIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), (functionProperties, arguments) -> new AvgAggregator(arguments, TIMESTAMP)) - .build() - ); + .build()); } private static DefaultFunctionResolver count() { FunctionName functionName = BuiltinFunctionName.COUNT.getName(); - DefaultFunctionResolver functionResolver = new DefaultFunctionResolver(functionName, - ExprCoreType.coreTypes().stream().collect(Collectors.toMap( - type -> new FunctionSignature(functionName, Collections.singletonList(type)), - type -> (functionProperties, arguments) -> new CountAggregator(arguments, INTEGER)))); + DefaultFunctionResolver functionResolver = + new DefaultFunctionResolver( + functionName, + ExprCoreType.coreTypes().stream() + .collect( + Collectors.toMap( + type -> + new FunctionSignature(functionName, Collections.singletonList(type)), + type -> + (functionProperties, arguments) -> + new CountAggregator(arguments, INTEGER)))); return functionResolver; } @@ -95,16 +104,19 @@ private static DefaultFunctionResolver sum() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(INTEGER)), + .put( + new FunctionSignature(functionName, Collections.singletonList(INTEGER)), (functionProperties, arguments) -> new SumAggregator(arguments, INTEGER)) - .put(new FunctionSignature(functionName, Collections.singletonList(LONG)), + .put( + new FunctionSignature(functionName, Collections.singletonList(LONG)), (functionProperties, arguments) -> new SumAggregator(arguments, LONG)) - .put(new FunctionSignature(functionName, Collections.singletonList(FLOAT)), + .put( + new FunctionSignature(functionName, Collections.singletonList(FLOAT)), (functionProperties, arguments) -> new SumAggregator(arguments, FLOAT)) - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new SumAggregator(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver min() { @@ -112,23 +124,32 @@ private static DefaultFunctionResolver min() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new 
FunctionSignature(functionName, Collections.singletonList(INTEGER)), + .put( + new FunctionSignature(functionName, Collections.singletonList(INTEGER)), (functionProperties, arguments) -> new MinAggregator(arguments, INTEGER)) - .put(new FunctionSignature(functionName, Collections.singletonList(LONG)), + .put( + new FunctionSignature(functionName, Collections.singletonList(LONG)), (functionProperties, arguments) -> new MinAggregator(arguments, LONG)) - .put(new FunctionSignature(functionName, Collections.singletonList(FLOAT)), + .put( + new FunctionSignature(functionName, Collections.singletonList(FLOAT)), (functionProperties, arguments) -> new MinAggregator(arguments, FLOAT)) - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new MinAggregator(arguments, DOUBLE)) - .put(new FunctionSignature(functionName, Collections.singletonList(STRING)), + .put( + new FunctionSignature(functionName, Collections.singletonList(STRING)), (functionProperties, arguments) -> new MinAggregator(arguments, STRING)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATE)), (functionProperties, arguments) -> new MinAggregator(arguments, DATE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATETIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATETIME)), (functionProperties, arguments) -> new MinAggregator(arguments, DATETIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MinAggregator(arguments, TIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), (functionProperties, arguments) -> new MinAggregator(arguments, TIMESTAMP)) .build()); } @@ -138,26 +159,34 @@ private static DefaultFunctionResolver max() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(INTEGER)), + .put( + new FunctionSignature(functionName, Collections.singletonList(INTEGER)), (functionProperties, arguments) -> new MaxAggregator(arguments, INTEGER)) - .put(new FunctionSignature(functionName, Collections.singletonList(LONG)), + .put( + new FunctionSignature(functionName, Collections.singletonList(LONG)), (functionProperties, arguments) -> new MaxAggregator(arguments, LONG)) - .put(new FunctionSignature(functionName, Collections.singletonList(FLOAT)), + .put( + new FunctionSignature(functionName, Collections.singletonList(FLOAT)), (functionProperties, arguments) -> new MaxAggregator(arguments, FLOAT)) - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> new MaxAggregator(arguments, DOUBLE)) - .put(new FunctionSignature(functionName, Collections.singletonList(STRING)), + .put( + new FunctionSignature(functionName, Collections.singletonList(STRING)), (functionProperties, arguments) -> new MaxAggregator(arguments, STRING)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATE)), + .put( + new FunctionSignature(functionName, 
Collections.singletonList(DATE)), (functionProperties, arguments) -> new MaxAggregator(arguments, DATE)) - .put(new FunctionSignature(functionName, Collections.singletonList(DATETIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DATETIME)), (functionProperties, arguments) -> new MaxAggregator(arguments, DATETIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIME)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIME)), (functionProperties, arguments) -> new MaxAggregator(arguments, TIME)) - .put(new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), + .put( + new FunctionSignature(functionName, Collections.singletonList(TIMESTAMP)), (functionProperties, arguments) -> new MaxAggregator(arguments, TIMESTAMP)) - .build() - ); + .build()); } private static DefaultFunctionResolver varSamp() { @@ -165,10 +194,10 @@ private static DefaultFunctionResolver varSamp() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> varianceSample(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver varPop() { @@ -176,10 +205,10 @@ private static DefaultFunctionResolver varPop() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> variancePopulation(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver stddevSamp() { @@ -187,10 +216,10 @@ private static DefaultFunctionResolver stddevSamp() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> stddevSample(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver stddevPop() { @@ -198,20 +227,22 @@ private static DefaultFunctionResolver stddevPop() { return new DefaultFunctionResolver( functionName, new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), + .put( + new FunctionSignature(functionName, Collections.singletonList(DOUBLE)), (functionProperties, arguments) -> stddevPopulation(arguments, DOUBLE)) - .build() - ); + .build()); } private static DefaultFunctionResolver take() { FunctionName functionName = BuiltinFunctionName.TAKE.getName(); - DefaultFunctionResolver functionResolver = new DefaultFunctionResolver(functionName, - new ImmutableMap.Builder() - .put(new FunctionSignature(functionName, ImmutableList.of(STRING, INTEGER)), - (functionProperties, arguments) -> new TakeAggregator(arguments, ARRAY)) - .build()); + DefaultFunctionResolver functionResolver = + new DefaultFunctionResolver( + functionName, + new ImmutableMap.Builder() + .put( + new FunctionSignature(functionName, ImmutableList.of(STRING, INTEGER)), + (functionProperties, arguments) -> new TakeAggregator(arguments, ARRAY)) + .build()); return functionResolver; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java 
b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java index a899a6b45b..c528968018 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/AvgAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static java.time.temporal.ChronoUnit.MILLIS; @@ -27,14 +26,14 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The average aggregator aggregate the value evaluated by the expression. - * If the expression evaluated result is NULL or MISSING, then the result is NULL. + * The average aggregator aggregate the value evaluated by the expression. If the expression + * evaluated result is NULL or MISSING, then the result is NULL. */ public class AvgAggregator extends Aggregator { /** - * To process by different ways different data types, we need to store the type. - * Input data has the same type as the result. + * To process by different ways different data types, we need to store the type. Input data has + * the same type as the result. */ private final ExprCoreType dataType; @@ -56,7 +55,7 @@ public AvgState create() { return new TimeAvgState(); case DOUBLE: return new DoubleAvgState(); - default: //unreachable code - we don't expose signatures for unsupported types + default: // unreachable code - we don't expose signatures for unsupported types throw new IllegalArgumentException( String.format("avg aggregation over %s type is not supported", dataType)); } @@ -72,9 +71,7 @@ public String toString() { return String.format(Locale.ROOT, "avg(%s)", format(getArguments())); } - /** - * Average State. - */ + /** Average State. */ protected abstract static class AvgState implements AggregationState { protected ExprValue count; protected ExprValue total; @@ -117,15 +114,16 @@ public ExprValue result() { } return new ExprDateValue( - new ExprTimestampValue(Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) - .dateValue()); + new ExprTimestampValue( + Instant.ofEpochMilli( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) + .dateValue()); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())) - .valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); return super.iterate(value); } } @@ -138,15 +136,16 @@ public ExprValue result() { } return new ExprDatetimeValue( - new ExprTimestampValue(Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) - .datetimeValue()); + new ExprTimestampValue( + Instant.ofEpochMilli( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())) + .datetimeValue()); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())) - .valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); return super.iterate(value); } } @@ -158,14 +157,15 @@ public ExprValue result() { return ExprNullValue.of(); } - return new ExprTimestampValue(Instant.ofEpochMilli( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue())); + return new ExprTimestampValue( + Instant.ofEpochMilli( + DSL.divide(DSL.literal(total), 
DSL.literal(count)).valueOf().longValue())); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())) - .valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(value.timestampValue().toEpochMilli())).valueOf(); return super.iterate(value); } } @@ -177,14 +177,16 @@ public ExprValue result() { return ExprNullValue.of(); } - return new ExprTimeValue(LocalTime.MIN.plus( - DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue(), MILLIS)); + return new ExprTimeValue( + LocalTime.MIN.plus( + DSL.divide(DSL.literal(total), DSL.literal(count)).valueOf().longValue(), MILLIS)); } @Override protected AvgState iterate(ExprValue value) { - total = DSL.add(DSL.literal(total), - DSL.literal(MILLIS.between(LocalTime.MIN, value.timeValue()))).valueOf(); + total = + DSL.add(DSL.literal(total), DSL.literal(MILLIS.between(LocalTime.MIN, value.timeValue()))) + .valueOf(); return super.iterate(value); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java index 813842cadc..c4c02eb1d3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/CountAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.utils.ExpressionUtils.format; @@ -43,9 +42,7 @@ public String toString() { : String.format(Locale.ROOT, "count(%s)", format(getArguments())); } - /** - * Count State. - */ + /** Count State. */ protected static class CountState implements AggregationState { protected int count; diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java index e9123c0ac2..863d4603a7 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/MaxAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -45,9 +44,7 @@ protected static class MaxState implements AggregationState { } public void max(ExprValue value) { - maxResult = maxResult.isNull() ? value - : (maxResult.compareTo(value) > 0) - ? maxResult : value; + maxResult = maxResult.isNull() ? value : (maxResult.compareTo(value) > 0) ? maxResult : value; } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java index 897fe857ff..c1b9ebcd4f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/MinAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -16,8 +15,8 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The minimum aggregator aggregate the value evaluated by the expression. - * If the expression evaluated result is NULL or MISSING, then the result is NULL. 
+ * The minimum aggregator aggregate the value evaluated by the expression. If the expression + * evaluated result is NULL or MISSING, then the result is NULL. */ public class MinAggregator extends Aggregator { @@ -25,7 +24,6 @@ public MinAggregator(List arguments, ExprCoreType returnType) { super(BuiltinFunctionName.MIN.getName(), arguments, returnType); } - @Override public MinState create() { return new MinState(); @@ -50,9 +48,7 @@ protected static class MinState implements AggregationState { } public void min(ExprValue value) { - minResult = minResult.isNull() ? value - : (minResult.compareTo(value) < 0) - ? minResult : value; + minResult = minResult.isNull() ? value : (minResult.compareTo(value) < 0) ? minResult : value; } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java index 510c5d1e45..fa84b74ba5 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/NamedAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import lombok.EqualsAndHashCode; @@ -12,35 +11,27 @@ import org.opensearch.sql.expression.ExpressionNodeVisitor; /** - * NamedAggregator expression that represents expression with name. - * Please see more details in associated unresolved expression operator + * NamedAggregator expression that represents expression with name. Please see more details in + * associated unresolved expression operator
* {@link org.opensearch.sql.ast.expression.Alias}. */ @EqualsAndHashCode(callSuper = false) public class NamedAggregator extends Aggregator { - /** - * Aggregator name. - */ + /** Aggregator name. */ private final String name; - /** - * Aggregator that being named. - */ - @Getter - private final Aggregator delegated; + /** Aggregator that being named. */ + @Getter private final Aggregator delegated; /** - * NamedAggregator. - * The aggregator properties {@link #condition} and {@link #distinct} - * are inherited by named aggregator to avoid errors introduced by the property inconsistency. + * NamedAggregator. The aggregator properties {@link #condition} and {@link #distinct} are + * inherited by named aggregator to avoid errors introduced by the property inconsistency. * * @param name name * @param delegated delegated */ - public NamedAggregator( - String name, - Aggregator delegated) { + public NamedAggregator(String name, Aggregator delegated) { super(delegated.getFunctionName(), delegated.getArguments(), delegated.returnType); this.name = name; this.delegated = delegated; @@ -60,7 +51,8 @@ protected AggregationState iterate(ExprValue value, AggregationState state) { /** * Get expression name using name or its alias (if it's present). - * @return expression name + * + * @return expression name */ public String getName() { return name; @@ -75,5 +67,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return getName(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java index 0cd8494449..d5422bc788 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/StdDevAggregator.java @@ -26,26 +26,18 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * StandardDeviation Aggregator. - */ +/** StandardDeviation Aggregator. */ public class StdDevAggregator extends Aggregator { private final boolean isSampleStdDev; - /** - * Build Population Variance {@link VarianceAggregator}. - */ - public static Aggregator stddevPopulation(List arguments, - ExprCoreType returnType) { + /** Build Population Variance {@link VarianceAggregator}. */ + public static Aggregator stddevPopulation(List arguments, ExprCoreType returnType) { return new StdDevAggregator(false, arguments, returnType); } - /** - * Build Sample Variance {@link VarianceAggregator}. - */ - public static Aggregator stddevSample(List arguments, - ExprCoreType returnType) { + /** Build Sample Variance {@link VarianceAggregator}. */ + public static Aggregator stddevSample(List arguments, ExprCoreType returnType) { return new StdDevAggregator(true, arguments, returnType); } @@ -53,7 +45,7 @@ public static Aggregator stddevSample(List arguments, * VarianceAggregator constructor. * * @param isSampleStdDev true for sample standard deviation aggregator, false for population - * standard deviation aggregator. + * standard deviation aggregator. * @param arguments aggregator arguments. * @param returnType aggregator return types. 
*/ @@ -74,8 +66,8 @@ public StdDevAggregator.StdDevState create() { } @Override - protected StdDevAggregator.StdDevState iterate(ExprValue value, - StdDevAggregator.StdDevState state) { + protected StdDevAggregator.StdDevState iterate( + ExprValue value, StdDevAggregator.StdDevState state) { state.evaluate(value); return state; } diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java index f5b042034a..d637721980 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/SumAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.data.model.ExprValueUtils.doubleValue; @@ -28,8 +27,8 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The sum aggregator aggregate the value evaluated by the expression. - * If the expression evaluated result is NULL or MISSING, then the result is NULL. + * The sum aggregator aggregate the value evaluated by the expression. If the expression evaluated + * result is NULL or MISSING, then the result is NULL. */ public class SumAggregator extends Aggregator { @@ -54,9 +53,7 @@ public String toString() { return String.format(Locale.ROOT, "sum(%s)", format(getArguments())); } - /** - * Sum State. - */ + /** Sum State. */ protected static class SumState implements AggregationState { private final ExprCoreType type; @@ -69,9 +66,7 @@ protected static class SumState implements AggregationState { isEmptyCollection = true; } - /** - * Add value to current sumResult. - */ + /** Add value to current sumResult. */ public void add(ExprValue value) { switch (type) { case INTEGER: diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java index cff08bb098..8791973353 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/TakeAggregator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.aggregation; import static org.opensearch.sql.utils.ExpressionUtils.format; @@ -18,8 +17,8 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; /** - * The take aggregator keeps and returns the original values of a field. - * If the field value is NULL or MISSING, then it is skipped. + * The take aggregator keeps and returns the original values of a field. If the field value is NULL + * or MISSING, then it is skipped. */ public class TakeAggregator extends Aggregator { @@ -43,9 +42,7 @@ public String toString() { return String.format(Locale.ROOT, "take(%s)", format(getArguments())); } - /** - * Take State. - */ + /** Take State. 
*/ protected static class TakeState implements AggregationState { protected int index; protected int size; diff --git a/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java b/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java index bd9f0948f6..920830d266 100644 --- a/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java +++ b/core/src/main/java/org/opensearch/sql/expression/aggregation/VarianceAggregator.java @@ -26,26 +26,18 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * Variance Aggregator. - */ +/** Variance Aggregator. */ public class VarianceAggregator extends Aggregator { private final boolean isSampleVariance; - /** - * Build Population Variance {@link VarianceAggregator}. - */ - public static Aggregator variancePopulation(List arguments, - ExprCoreType returnType) { + /** Build Population Variance {@link VarianceAggregator}. */ + public static Aggregator variancePopulation(List arguments, ExprCoreType returnType) { return new VarianceAggregator(false, arguments, returnType); } - /** - * Build Sample Variance {@link VarianceAggregator}. - */ - public static Aggregator varianceSample(List arguments, - ExprCoreType returnType) { + /** Build Sample Variance {@link VarianceAggregator}. */ + public static Aggregator varianceSample(List arguments, ExprCoreType returnType) { return new VarianceAggregator(true, arguments, returnType); } diff --git a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java index ad7860a6dc..e18f33c6ae 100644 --- a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java +++ b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/CaseClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.conditional.cases; import static org.opensearch.sql.data.type.ExprCoreType.UNDEFINED; @@ -32,19 +31,13 @@ @ToString public class CaseClause extends FunctionExpression { - /** - * List of WHEN clauses. - */ + /** List of WHEN clauses. */ private final List whenClauses; - /** - * Default result if none of WHEN conditions match. - */ + /** Default result if none of WHEN conditions match. */ private final Expression defaultResult; - /** - * Initialize case clause. - */ + /** Initialize case clause. */ public CaseClause(List whenClauses, Expression defaultResult) { super(FunctionName.of("case"), concatArgs(whenClauses, defaultResult)); this.whenClauses = whenClauses; @@ -75,15 +68,13 @@ public T accept(ExpressionNodeVisitor visitor, C context) { } /** - * Get types of each result in WHEN clause and ELSE clause. - * Exclude UNKNOWN type from NULL literal which means NULL in THEN or ELSE clause - * is not included in result. + * Get types of each result in WHEN clause and ELSE clause. Exclude UNKNOWN type from NULL literal + * which means NULL in THEN or ELSE clause is not included in result. + * * @return all result types. Use list so caller can generate friendly error message. 
*/ public List allResultTypes() { - List types = whenClauses.stream() - .map(WhenClause::type) - .collect(Collectors.toList()); + List types = whenClauses.stream().map(WhenClause::type).collect(Collectors.toList()); if (defaultResult != null) { types.add(defaultResult.type()); } @@ -92,8 +83,8 @@ public List allResultTypes() { return types; } - private static List concatArgs(List whenClauses, - Expression defaultResult) { + private static List concatArgs( + List whenClauses, Expression defaultResult) { ImmutableList.Builder args = ImmutableList.builder(); whenClauses.forEach(args::add); @@ -102,5 +93,4 @@ private static List concatArgs(List whenClauses, } return args.build(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java index fd2eeab983..7eb731a1f2 100644 --- a/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java +++ b/core/src/main/java/org/opensearch/sql/expression/conditional/cases/WhenClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.conditional.cases; import com.google.common.collect.ImmutableList; @@ -18,27 +17,19 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.FunctionName; -/** - * WHEN clause that consists of a condition and a result corresponding. - */ +/** WHEN clause that consists of a condition and a result corresponding. */ @EqualsAndHashCode(callSuper = false) @Getter @ToString public class WhenClause extends FunctionExpression { - /** - * Condition that must be a predicate. - */ + /** Condition that must be a predicate. */ private final Expression condition; - /** - * Result to return if condition is evaluated to true. - */ + /** Result to return if condition is evaluated to true. */ private final Expression result; - /** - * Initialize when clause. - */ + /** Initialize when clause. */ public WhenClause(Expression condition, Expression result) { super(FunctionName.of("when"), ImmutableList.of(condition, result)); this.condition = condition; @@ -47,8 +38,9 @@ public WhenClause(Expression condition, Expression result) { /** * Evaluate when condition. - * @param valueEnv value env - * @return is condition satisfied + * + * @param valueEnv value env + * @return is condition satisfied */ public boolean isTrue(Environment valueEnv) { ExprValue result = condition.valueOf(valueEnv); @@ -72,5 +64,4 @@ public ExprType type() { public T accept(ExpressionNodeVisitor visitor, C context) { return visitor.visitWhen(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java b/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java index c5b6343991..0837075f7c 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/CalendarLookup.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import com.google.common.collect.ImmutableList; @@ -18,6 +17,7 @@ class CalendarLookup { /** * Get a calendar for the specific mode. + * * @param mode Mode to get calendar for. * @param date Date to get calendar for. */ @@ -38,6 +38,7 @@ private static Calendar getCalendar(int mode, LocalDate date) { /** * Set first day of week, minimal days in first week and date in calendar. 
+ * * @param firstDayOfWeek the given first day of the week. * @param minimalDaysInWeek the given minimal days required in the first week of the year. * @param date the given date. @@ -52,6 +53,7 @@ private static Calendar getCalendar(int firstDayOfWeek, int minimalDaysInWeek, L /** * Returns week number for date according to mode. + * * @param mode Integer for mode. Valid mode values are 0 to 7. * @param date LocalDate for date. */ @@ -68,6 +70,7 @@ static int getWeekNumber(int mode, LocalDate date) { /** * Returns year for date according to mode. + * * @param mode Integer for mode. Valid mode values are 0 to 7. * @param date LocalDate for date. */ diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java index 55bfa67f3f..13f9a077e4 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java @@ -28,8 +28,8 @@ import org.opensearch.sql.expression.function.FunctionProperties; /** - * This class converts a SQL style DATE_FORMAT format specifier and converts it to a - * Java SimpleDateTime format. + * This class converts a SQL style DATE_FORMAT format specifier and converts it to a Java + * SimpleDateTime format. */ class DateTimeFormatterUtil { private static final int SUFFIX_SPECIAL_START_TH = 11; @@ -39,8 +39,7 @@ class DateTimeFormatterUtil { private static final String NANO_SEC_FORMAT = "'%06d'"; private static final Map SUFFIX_CONVERTER = - ImmutableMap.builder() - .put(1, "st").put(2, "nd").put(3, "rd").build(); + ImmutableMap.builder().put(1, "st").put(2, "nd").put(3, "rd").build(); // The following have special cases that need handling outside of the format options provided // by the DateTimeFormatter class. @@ -50,52 +49,70 @@ interface DateTimeFormatHandler { private static final Map DATE_HANDLERS = ImmutableMap.builder() - .put("%a", (date) -> "EEE") // %a => EEE - Abbreviated weekday name (Sun..Sat) - .put("%b", (date) -> "LLL") // %b => LLL - Abbreviated month name (Jan..Dec) - .put("%c", (date) -> "MM") // %c => MM - Month, numeric (0..12) - .put("%d", (date) -> "dd") // %d => dd - Day of the month, numeric (00..31) - .put("%e", (date) -> "d") // %e => d - Day of the month, numeric (0..31) - .put("%H", (date) -> "HH") // %H => HH - (00..23) - .put("%h", (date) -> "hh") // %h => hh - (01..12) - .put("%I", (date) -> "hh") // %I => hh - (01..12) - .put("%i", (date) -> "mm") // %i => mm - Minutes, numeric (00..59) - .put("%j", (date) -> "DDD") // %j => DDD - (001..366) - .put("%k", (date) -> "H") // %k => H - (0..23) - .put("%l", (date) -> "h") // %l => h - (1..12) - .put("%p", (date) -> "a") // %p => a - AM or PM - .put("%M", (date) -> "LLLL") // %M => LLLL - Month name (January..December) - .put("%m", (date) -> "MM") // %m => MM - Month, numeric (00..12) - .put("%r", (date) -> "hh:mm:ss a") // %r => hh:mm:ss a - hh:mm:ss followed by AM or PM - .put("%S", (date) -> "ss") // %S => ss - Seconds (00..59) - .put("%s", (date) -> "ss") // %s => ss - Seconds (00..59) - .put("%T", (date) -> "HH:mm:ss") // %T => HH:mm:ss - .put("%W", (date) -> "EEEE") // %W => EEEE - Weekday name (Sunday..Saturday) - .put("%Y", (date) -> "yyyy") // %Y => yyyy - Year, numeric, 4 digits - .put("%y", (date) -> "yy") // %y => yy - Year, numeric, 2 digits - // The following are not directly supported by DateTimeFormatter. 
- .put("%D", (date) -> // %w - Day of month with English suffix - String.format("'%d%s'", date.getDayOfMonth(), getSuffix(date.getDayOfMonth()))) - .put("%f", (date) -> // %f - Microseconds - String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) - .put("%w", (date) -> // %w - Day of week (0 indexed) - String.format("'%d'", date.getDayOfWeek().getValue())) - .put("%U", (date) -> // %U Week where Sunday is the first day - WEEK() mode 0 - String.format("'%d'", CalendarLookup.getWeekNumber(0, date.toLocalDate()))) - .put("%u", (date) -> // %u Week where Monday is the first day - WEEK() mode 1 - String.format("'%d'", CalendarLookup.getWeekNumber(1, date.toLocalDate()))) - .put("%V", (date) -> // %V Week where Sunday is the first day - WEEK() mode 2 used with %X - String.format("'%d'", CalendarLookup.getWeekNumber(2, date.toLocalDate()))) - .put("%v", (date) -> // %v Week where Monday is the first day - WEEK() mode 3 used with %x - String.format("'%d'", CalendarLookup.getWeekNumber(3, date.toLocalDate()))) - .put("%X", (date) -> // %X Year for week where Sunday is the first day, 4 digits used with %V - String.format("'%d'", CalendarLookup.getYearNumber(2, date.toLocalDate()))) - .put("%x", (date) -> // %x Year for week where Monday is the first day, 4 digits used with %v - String.format("'%d'", CalendarLookup.getYearNumber(3, date.toLocalDate()))) - .build(); - - //Handlers for the time_format function. - //Some format specifiers return 0 or null to align with MySQL. - //https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_time-format + .put("%a", (date) -> "EEE") // %a => EEE - Abbreviated weekday name (Sun..Sat) + .put("%b", (date) -> "LLL") // %b => LLL - Abbreviated month name (Jan..Dec) + .put("%c", (date) -> "MM") // %c => MM - Month, numeric (0..12) + .put("%d", (date) -> "dd") // %d => dd - Day of the month, numeric (00..31) + .put("%e", (date) -> "d") // %e => d - Day of the month, numeric (0..31) + .put("%H", (date) -> "HH") // %H => HH - (00..23) + .put("%h", (date) -> "hh") // %h => hh - (01..12) + .put("%I", (date) -> "hh") // %I => hh - (01..12) + .put("%i", (date) -> "mm") // %i => mm - Minutes, numeric (00..59) + .put("%j", (date) -> "DDD") // %j => DDD - (001..366) + .put("%k", (date) -> "H") // %k => H - (0..23) + .put("%l", (date) -> "h") // %l => h - (1..12) + .put("%p", (date) -> "a") // %p => a - AM or PM + .put("%M", (date) -> "LLLL") // %M => LLLL - Month name (January..December) + .put("%m", (date) -> "MM") // %m => MM - Month, numeric (00..12) + .put("%r", (date) -> "hh:mm:ss a") // %r => hh:mm:ss a - hh:mm:ss followed by AM or PM + .put("%S", (date) -> "ss") // %S => ss - Seconds (00..59) + .put("%s", (date) -> "ss") // %s => ss - Seconds (00..59) + .put("%T", (date) -> "HH:mm:ss") // %T => HH:mm:ss + .put("%W", (date) -> "EEEE") // %W => EEEE - Weekday name (Sunday..Saturday) + .put("%Y", (date) -> "yyyy") // %Y => yyyy - Year, numeric, 4 digits + .put("%y", (date) -> "yy") // %y => yy - Year, numeric, 2 digits + // The following are not directly supported by DateTimeFormatter. 
+ .put( + "%D", + (date) -> // %w - Day of month with English suffix + String.format("'%d%s'", date.getDayOfMonth(), getSuffix(date.getDayOfMonth()))) + .put( + "%f", + (date) -> // %f - Microseconds + String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) + .put( + "%w", + (date) -> // %w - Day of week (0 indexed) + String.format("'%d'", date.getDayOfWeek().getValue())) + .put( + "%U", + (date) -> // %U Week where Sunday is the first day - WEEK() mode 0 + String.format("'%d'", CalendarLookup.getWeekNumber(0, date.toLocalDate()))) + .put( + "%u", + (date) -> // %u Week where Monday is the first day - WEEK() mode 1 + String.format("'%d'", CalendarLookup.getWeekNumber(1, date.toLocalDate()))) + .put( + "%V", + (date) -> // %V Week where Sunday is the first day - WEEK() mode 2 used with %X + String.format("'%d'", CalendarLookup.getWeekNumber(2, date.toLocalDate()))) + .put( + "%v", + (date) -> // %v Week where Monday is the first day - WEEK() mode 3 used with %x + String.format("'%d'", CalendarLookup.getWeekNumber(3, date.toLocalDate()))) + .put( + "%X", + (date) -> // %X Year for week where Sunday is the first day, 4 digits used with %V + String.format("'%d'", CalendarLookup.getYearNumber(2, date.toLocalDate()))) + .put( + "%x", + (date) -> // %x Year for week where Monday is the first day, 4 digits used with %v + String.format("'%d'", CalendarLookup.getYearNumber(3, date.toLocalDate()))) + .build(); + + // Handlers for the time_format function. + // Some format specifiers return 0 or null to align with MySQL. + // https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_time-format private static final Map TIME_HANDLERS = ImmutableMap.builder() .put("%a", (date) -> null) @@ -121,8 +138,8 @@ interface DateTimeFormatHandler { .put("%Y", (date) -> "0000") .put("%y", (date) -> "00") .put("%D", (date) -> null) - .put("%f", (date) -> // %f - Microseconds - String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) + // %f - Microseconds + .put("%f", (date) -> String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) .put("%w", (date) -> null) .put("%U", (date) -> null) .put("%u", (date) -> null) @@ -157,7 +174,7 @@ interface DateTimeFormatHandler { .put("%Y", "u") // %Y => yyyy - Year, numeric, 4 digits .put("%y", "u") // %y => yy - Year, numeric, 2 digits .put("%f", "n") // %f => n - Nanoseconds - //The following have been implemented but cannot be aligned with + // The following have been implemented but cannot be aligned with // MySQL due to the limitations of the DatetimeFormatter .put("%D", "d") // %w - Day of month with English suffix .put("%w", "e") // %w - Day of week (0 indexed) @@ -170,20 +187,19 @@ interface DateTimeFormatHandler { .build(); private static final Pattern pattern = Pattern.compile("%."); - private static final Pattern CHARACTERS_WITH_NO_MOD_LITERAL_BEHIND_PATTERN - = Pattern.compile("(? 
handler, - LocalDateTime datetime) { + static ExprValue getFormattedString( + ExprValue formatExpr, Map handler, LocalDateTime datetime) { StringBuffer cleanFormat = getCleanFormat(formatExpr); final Matcher matcher = pattern.matcher(cleanFormat.toString()); final StringBuffer format = new StringBuffer(); try { while (matcher.find()) { - matcher.appendReplacement(format, - handler.getOrDefault(matcher.group(), (d) -> - String.format("'%s'", matcher.group().replaceFirst(MOD_LITERAL, ""))) + matcher.appendReplacement( + format, + handler + .getOrDefault( + matcher.group(), + (d) -> String.format("'%s'", matcher.group().replaceFirst(MOD_LITERAL, ""))) .getFormat(datetime)); } } catch (Exception e) { @@ -219,12 +238,13 @@ static ExprValue getFormattedString(ExprValue formatExpr, // English Locale matches SQL requirements. // 'AM'/'PM' instead of 'a.m.'/'p.m.' // 'Sat' instead of 'Sat.' etc - return new ExprStringValue(datetime.format( - DateTimeFormatter.ofPattern(format.toString(), Locale.ENGLISH))); + return new ExprStringValue( + datetime.format(DateTimeFormatter.ofPattern(format.toString(), Locale.ENGLISH))); } /** * Format the date using the date format String. + * * @param dateExpr the date ExprValue of Date/Datetime/Timestamp/String type. * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. @@ -242,13 +262,14 @@ static ExprValue getFormattedDateOfToday(ExprValue formatExpr, ExprValue time, C /** * Format the date using the date format String. + * * @param timeExpr the date ExprValue of Date/Datetime/Timestamp/String type. * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. */ static ExprValue getFormattedTime(ExprValue timeExpr, ExprValue formatExpr) { - //Initializes DateTime with LocalDate.now(). This is safe because the date is ignored. - //The time_format function will only return 0 or null for invalid string format specifiers. + // Initializes DateTime with LocalDate.now(). This is safe because the date is ignored. + // The time_format function will only return 0 or null for invalid string format specifiers. 
final LocalDateTime time = LocalDateTime.of(LocalDate.now(), timeExpr.timeValue()); return getFormattedString(formatExpr, TIME_HANDLERS, time); @@ -266,30 +287,33 @@ private static boolean canGetTime(TemporalAccessor ta) { && ta.isSupported(ChronoField.SECOND_OF_MINUTE)); } - static ExprValue parseStringWithDateOrTime(FunctionProperties fp, - ExprValue datetimeStringExpr, - ExprValue formatExpr) { + static ExprValue parseStringWithDateOrTime( + FunctionProperties fp, ExprValue datetimeStringExpr, ExprValue formatExpr) { - //Replace patterns with % for Java DateTimeFormatter + // Replace patterns with % for Java DateTimeFormatter StringBuffer cleanFormat = getCleanFormat(formatExpr); final Matcher matcher = pattern.matcher(cleanFormat.toString()); final StringBuffer format = new StringBuffer(); while (matcher.find()) { - matcher.appendReplacement(format, - STR_TO_DATE_FORMATS.getOrDefault(matcher.group(), + matcher.appendReplacement( + format, + STR_TO_DATE_FORMATS.getOrDefault( + matcher.group(), String.format("'%s'", matcher.group().replaceFirst(MOD_LITERAL, "")))); } matcher.appendTail(format); TemporalAccessor taWithMissingFields; - //Return NULL for invalid parse in string to align with MySQL + // Return NULL for invalid parse in string to align with MySQL try { - //Get Temporal Accessor to initially parse string without default values - taWithMissingFields = new DateTimeFormatterBuilder() - .appendPattern(format.toString()) - .toFormatter().withResolverStyle(ResolverStyle.STRICT) - .parseUnresolved(datetimeStringExpr.stringValue(), new ParsePosition(0)); + // Get Temporal Accessor to initially parse string without default values + taWithMissingFields = + new DateTimeFormatterBuilder() + .appendPattern(format.toString()) + .toFormatter() + .withResolverStyle(ResolverStyle.STRICT) + .parseUnresolved(datetimeStringExpr.stringValue(), new ParsePosition(0)); if (taWithMissingFields == null) { throw new DateTimeException("Input string could not be parsed properly."); } @@ -300,31 +324,42 @@ static ExprValue parseStringWithDateOrTime(FunctionProperties fp, return ExprNullValue.of(); } - int year = taWithMissingFields.isSupported(ChronoField.YEAR) - ? taWithMissingFields.get(ChronoField.YEAR) : 2000; - - int month = taWithMissingFields.isSupported(ChronoField.MONTH_OF_YEAR) - ? taWithMissingFields.get(ChronoField.MONTH_OF_YEAR) : 1; - - int day = taWithMissingFields.isSupported(ChronoField.DAY_OF_MONTH) - ? taWithMissingFields.get(ChronoField.DAY_OF_MONTH) : 1; - - int hour = taWithMissingFields.isSupported(ChronoField.HOUR_OF_DAY) - ? taWithMissingFields.get(ChronoField.HOUR_OF_DAY) : 0; - - int minute = taWithMissingFields.isSupported(ChronoField.MINUTE_OF_HOUR) - ? taWithMissingFields.get(ChronoField.MINUTE_OF_HOUR) : 0; - - int second = taWithMissingFields.isSupported(ChronoField.SECOND_OF_MINUTE) - ? taWithMissingFields.get(ChronoField.SECOND_OF_MINUTE) : 0; - - //Fill returned datetime with current date if only Time information was parsed + int year = + taWithMissingFields.isSupported(ChronoField.YEAR) + ? taWithMissingFields.get(ChronoField.YEAR) + : 2000; + + int month = + taWithMissingFields.isSupported(ChronoField.MONTH_OF_YEAR) + ? taWithMissingFields.get(ChronoField.MONTH_OF_YEAR) + : 1; + + int day = + taWithMissingFields.isSupported(ChronoField.DAY_OF_MONTH) + ? taWithMissingFields.get(ChronoField.DAY_OF_MONTH) + : 1; + + int hour = + taWithMissingFields.isSupported(ChronoField.HOUR_OF_DAY) + ? 
taWithMissingFields.get(ChronoField.HOUR_OF_DAY) + : 0; + + int minute = + taWithMissingFields.isSupported(ChronoField.MINUTE_OF_HOUR) + ? taWithMissingFields.get(ChronoField.MINUTE_OF_HOUR) + : 0; + + int second = + taWithMissingFields.isSupported(ChronoField.SECOND_OF_MINUTE) + ? taWithMissingFields.get(ChronoField.SECOND_OF_MINUTE) + : 0; + + // Fill returned datetime with current date if only Time information was parsed LocalDateTime output; if (!canGetDate(taWithMissingFields)) { - output = LocalDateTime.of( - LocalDate.now(fp.getQueryStartClock()), - LocalTime.of(hour, minute, second) - ); + output = + LocalDateTime.of( + LocalDate.now(fp.getQueryStartClock()), LocalTime.of(hour, minute, second)); } else { output = LocalDateTime.of(year, month, day, hour, minute, second); } @@ -334,6 +369,7 @@ static ExprValue parseStringWithDateOrTime(FunctionProperties fp, /** * Returns English suffix of incoming value. + * * @param val Incoming value. * @return English suffix as String (st, nd, rd, th) */ diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java index cd5ef23d1c..d17d59d358 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunction.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; - import static java.time.temporal.ChronoUnit.DAYS; import static java.time.temporal.ChronoUnit.HOURS; import static java.time.temporal.ChronoUnit.MICROS; @@ -100,14 +98,13 @@ import org.opensearch.sql.utils.DateTimeUtils; /** - * The definition of date and time functions. - * 1) have the clear interface for function define. - * 2) the implementation should rely on ExprValue. + * The definition of date and time functions. 1) have the clear interface for function define. 2) + * the implementation should rely on ExprValue. */ @UtilityClass @SuppressWarnings("unchecked") public class DateTimeFunction { - //The number of seconds per day + // The number of seconds per day public static final long SECONDS_PER_DAY = 86400; // The number of days from year zero to year 1970. @@ -254,11 +251,12 @@ public void register(BuiltinFunctionRepository repository) { * `now(y) return different values. */ private FunctionResolver now(FunctionName functionName) { - return define(functionName, + return define( + functionName, implWithProperties( - functionProperties -> new ExprDatetimeValue( - formatNow(functionProperties.getQueryStartClock())), DATETIME) - ); + functionProperties -> + new ExprDatetimeValue(formatNow(functionProperties.getQueryStartClock())), + DATETIME)); } private FunctionResolver now() { @@ -277,25 +275,28 @@ private FunctionResolver localtime() { return now(BuiltinFunctionName.LOCALTIME.getName()); } - /** - * SYSDATE() returns the time at which it executes. - */ + /** SYSDATE() returns the time at which it executes. 
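The default-filling above (year 2000, month 1, day 1, zero time for anything the format string did not supply) can be reproduced in isolation with `parseUnresolved`; a minimal sketch, with an illustrative class name::

  import java.text.ParsePosition;
  import java.time.LocalDateTime;
  import java.time.format.DateTimeFormatterBuilder;
  import java.time.format.ResolverStyle;
  import java.time.temporal.ChronoField;
  import java.time.temporal.TemporalAccessor;

  public class LenientParseSketch {
    public static void main(String[] args) {
      // parseUnresolved leaves unmentioned fields absent instead of throwing
      // (it returns null on a failed parse, which the utility above maps to NULL).
      TemporalAccessor parsed =
          new DateTimeFormatterBuilder()
              .appendPattern("uuuu-MM")
              .toFormatter()
              .withResolverStyle(ResolverStyle.STRICT)
              .parseUnresolved("2023-07", new ParsePosition(0));

      // Same defaults as above for anything the format string did not supply.
      int year = parsed.isSupported(ChronoField.YEAR) ? parsed.get(ChronoField.YEAR) : 2000;
      int month =
          parsed.isSupported(ChronoField.MONTH_OF_YEAR) ? parsed.get(ChronoField.MONTH_OF_YEAR) : 1;
      int day =
          parsed.isSupported(ChronoField.DAY_OF_MONTH) ? parsed.get(ChronoField.DAY_OF_MONTH) : 1;

      System.out.println(LocalDateTime.of(year, month, day, 0, 0, 0)); // 2023-07-01T00:00
    }
  }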
*/ private FunctionResolver sysdate() { - return define(BuiltinFunctionName.SYSDATE.getName(), - implWithProperties(functionProperties - -> new ExprDatetimeValue(formatNow(Clock.systemDefaultZone())), DATETIME), - FunctionDSL.implWithProperties((functionProperties, v) -> new ExprDatetimeValue( - formatNow(Clock.systemDefaultZone(), v.integerValue())), DATETIME, INTEGER) - ); + return define( + BuiltinFunctionName.SYSDATE.getName(), + implWithProperties( + functionProperties -> new ExprDatetimeValue(formatNow(Clock.systemDefaultZone())), + DATETIME), + FunctionDSL.implWithProperties( + (functionProperties, v) -> + new ExprDatetimeValue(formatNow(Clock.systemDefaultZone(), v.integerValue())), + DATETIME, + INTEGER)); } - /** - * Synonym for @see `now`. - */ + /** Synonym for @see `now`. */ private FunctionResolver curtime(FunctionName functionName) { - return define(functionName, - implWithProperties(functionProperties -> new ExprTimeValue( - formatNow(functionProperties.getQueryStartClock()).toLocalTime()), TIME)); + return define( + functionName, + implWithProperties( + functionProperties -> + new ExprTimeValue(formatNow(functionProperties.getQueryStartClock()).toLocalTime()), + TIME)); } private FunctionResolver curtime() { @@ -307,9 +308,12 @@ private FunctionResolver current_time() { } private FunctionResolver curdate(FunctionName functionName) { - return define(functionName, - implWithProperties(functionProperties -> new ExprDateValue( - formatNow(functionProperties.getQueryStartClock()).toLocalDate()), DATE)); + return define( + functionName, + implWithProperties( + functionProperties -> + new ExprDateValue(formatNow(functionProperties.getQueryStartClock()).toLocalDate()), + DATE)); } private FunctionResolver curdate() { @@ -321,32 +325,32 @@ private FunctionResolver current_date() { } /** - * A common signature for `date_add` and `date_sub`. - * Specify a start date and add/subtract a temporal amount to/from the date. + * A common signature for `date_add` and `date_sub`.
+ * Specify a start date and add/subtract a temporal amount to/from the date.
* The return type depends on the date type and the interval unit. Detailed supported signatures: - * (DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME - * MySQL has these signatures too - * (DATE, INTERVAL) -> DATE // when interval has no time part - * (TIME, INTERVAL) -> TIME // when interval has no date part - * (STRING, INTERVAL) -> STRING // when argument has date or datetime string, - * // result has date or datetime depending on interval type + *
+ * (DATE/DATETIME/TIMESTAMP/TIME, INTERVAL) -> DATETIME
+ * MySQL has these signatures too
+ * (DATE, INTERVAL) -> DATE // when interval has no time part
+ * (TIME, INTERVAL) -> TIME // when interval has no date part
+ * (STRING, INTERVAL) -> STRING // when the argument is a date or datetime string,
+ * // the result is date or datetime, depending on the interval type
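A minimal sketch of why the interval unit drives the return type, using plain `java.time` rather than the project's ExprValue machinery (class name is illustrative)::

  import java.time.Duration;
  import java.time.LocalDate;
  import java.time.LocalDateTime;
  import java.time.Period;

  public class DateAddSketch {
    public static void main(String[] args) {
      LocalDateTime base = LocalDate.of(2023, 1, 31).atStartOfDay();
      // A date-only interval keeps a date-shaped result and clamps to the month end.
      System.out.println(base.plus(Period.ofMonths(1)).toLocalDate()); // 2023-02-28
      // A time-bearing interval forces a datetime-shaped result.
      System.out.println(base.plus(Duration.ofHours(36))); // 2023-02-01T12:00
    }
  }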
*/ private Stream> get_date_add_date_sub_signatures( SerializableTriFunction function) { return Stream.of( implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATE, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), - DATETIME, DATETIME, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), - DATETIME, TIMESTAMP, INTERVAL), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, INTERVAL) - ); + implWithProperties( + nullMissingHandlingWithProperties(function), DATETIME, DATETIME, INTERVAL), + implWithProperties( + nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, INTERVAL), + implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, INTERVAL)); } /** - * A common signature for `adddate` and `subdate`. - * Adds/subtracts an integer number of days to/from the first argument. - * (DATE, LONG) -> DATE + * A common signature for `adddate` and `subdate`.
+ * Adds/subtracts an integer number of days to/from the first argument.
+ * (DATE, LONG) -> DATE
* (TIME/DATETIME/TIMESTAMP, LONG) -> DATETIME */ private Stream> get_adddate_subdate_signatures( @@ -355,87 +359,128 @@ private FunctionResolver current_date() { implWithProperties(nullMissingHandlingWithProperties(function), DATE, DATE, LONG), implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, DATETIME, LONG), implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIMESTAMP, LONG), - implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, LONG) - ); + implWithProperties(nullMissingHandlingWithProperties(function), DATETIME, TIME, LONG)); } private DefaultFunctionResolver adddate() { - return define(BuiltinFunctionName.ADDDATE.getName(), + return define( + BuiltinFunctionName.ADDDATE.getName(), (SerializableFunction>[]) (Stream.concat( - get_date_add_date_sub_signatures(DateTimeFunction::exprAddDateInterval), - get_adddate_subdate_signatures(DateTimeFunction::exprAddDateDays)) + get_date_add_date_sub_signatures(DateTimeFunction::exprAddDateInterval), + get_adddate_subdate_signatures(DateTimeFunction::exprAddDateDays)) .toArray(SerializableFunction[]::new))); } /** - * Adds expr2 to expr1 and returns the result. - * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME - * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME - * TODO: MySQL has these signatures too - * (STRING, STRING/TIME) -> STRING // second arg - string with time only - * (x, STRING) -> NULL // second arg - string with timestamp - * (x, STRING/DATE) -> x // second arg - string with date only + * Adds expr2 to expr1 and returns the result.
+ * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
+ * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * TODO: MySQL has these signatures too
+ * (STRING, STRING/TIME) -> STRING // second arg - string with time only
+ * (x, STRING) -> NULL // second arg - string with timestamp
+ * (x, STRING/DATE) -> x // second arg - string with date only */ private DefaultFunctionResolver addtime() { - return define(BuiltinFunctionName.ADDTIME.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - TIME, TIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATETIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, DATE, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), - DATETIME, TIMESTAMP, TIMESTAMP) - ); - } - - /** - * Converts date/time from a specified timezone to another specified timezone. 
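For the ADDTIME signatures above, the (TIME, x) overloads keep only the time of day (wrapping past midnight), while DATETIME-shaped inputs roll the date instead; a small `java.time` sketch of that distinction (illustrative class name)::

  import java.time.Duration;
  import java.time.LocalDateTime;
  import java.time.LocalTime;

  public class AddTimeSketch {
    public static void main(String[] args) {
      Duration twoHours = Duration.ofHours(2);
      // (TIME, x): only the time of day survives, wrapping past midnight.
      System.out.println(LocalTime.of(23, 30).plus(twoHours)); // 01:30
      // (DATETIME, x): the date rolls over instead.
      System.out.println(LocalDateTime.of(2023, 5, 4, 23, 30).plus(twoHours)); // 2023-05-05T01:30
    }
  }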
- * The supported signatures: - * (DATETIME, STRING, STRING) -> DATETIME - * (STRING, STRING, STRING) -> DATETIME - */ - private DefaultFunctionResolver convert_tz() { - return define(BuiltinFunctionName.CONVERT_TZ.getName(), - impl(nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, DATETIME, STRING, STRING), - impl(nullMissingHandling(DateTimeFunction::exprConvertTZ), - DATETIME, STRING, STRING, STRING) - ); + return define( + BuiltinFunctionName.ADDTIME.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), TIME, TIME, DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + TIME, + TIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATETIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), DATETIME, DATE, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATE, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + DATE, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprAddTime), + DATETIME, + TIMESTAMP, + TIMESTAMP)); } /** - * Extracts the date part of a date and time value. - * Also to construct a date type. The supported signatures: - * STRING/DATE/DATETIME/TIMESTAMP -> DATE + * Converts date/time from a specified timezone to another specified timezone.
+ * The supported signatures:
+ * (DATETIME, STRING, STRING) -> DATETIME
+ * (STRING, STRING, STRING) -> DATETIME + */ + private DefaultFunctionResolver convert_tz() { + return define( + BuiltinFunctionName.CONVERT_TZ.getName(), + impl( + nullMissingHandling(DateTimeFunction::exprConvertTZ), + DATETIME, + DATETIME, + STRING, + STRING), + impl( + nullMissingHandling(DateTimeFunction::exprConvertTZ), + DATETIME, + STRING, + STRING, + STRING)); + } + + /** + * Extracts the date part of a date and time value. Also to construct a date type. The supported + * signatures: STRING/DATE/DATETIME/TIMESTAMP -> DATE */ private DefaultFunctionResolver date() { - return define(BuiltinFunctionName.DATE.getName(), + return define( + BuiltinFunctionName.DATE.getName(), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, STRING), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATE), impl(nullMissingHandling(DateTimeFunction::exprDate), DATE, DATETIME), @@ -447,273 +492,310 @@ private DefaultFunctionResolver date() { * (DATE/DATETIME/TIMESTAMP/TIME, DATE/DATETIME/TIMESTAMP/TIME) -> LONG */ private DefaultFunctionResolver datediff() { - return define(BuiltinFunctionName.DATEDIFF.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATE, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIMESTAMP, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, TIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), - LONG, DATETIME, TIME)); - } - - /** - * Specify a datetime with time zone field and a time zone to convert to. - * Returns a local date time. 
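CONVERT_TZ above (and the two-argument DATETIME overload discussed next) amount to a zone shift; a minimal sketch with `ZoneId`, where the offsets are arbitrary examples and the class name is illustrative::

  import java.time.LocalDateTime;
  import java.time.ZoneId;

  public class ConvertTzSketch {
    public static void main(String[] args) {
      LocalDateTime in = LocalDateTime.of(2023, 5, 4, 12, 0);
      // Attach the source zone, shift the instant to the target zone, drop the zone again.
      LocalDateTime out =
          in.atZone(ZoneId.of("+00:00")).withZoneSameInstant(ZoneId.of("+05:30")).toLocalDateTime();
      System.out.println(out); // 2023-05-04T17:30
    }
  }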
- * (STRING, STRING) -> DATETIME + return define( + BuiltinFunctionName.DATEDIFF.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATE, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, TIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), LONG, TIME, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATE, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIMESTAMP, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + TIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprDateDiff), + LONG, + DATETIME, + TIME)); + } + + /** + * Specify a datetime with time zone field and a time zone to convert to.
+ * Returns a local date time.
+ * (STRING, STRING) -> DATETIME
* (STRING) -> DATETIME */ private FunctionResolver datetime() { - return define(BuiltinFunctionName.DATETIME.getName(), - impl(nullMissingHandling(DateTimeFunction::exprDateTime), - DATETIME, STRING, STRING), - impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), - DATETIME, STRING) - ); + return define( + BuiltinFunctionName.DATETIME.getName(), + impl(nullMissingHandling(DateTimeFunction::exprDateTime), DATETIME, STRING, STRING), + impl(nullMissingHandling(DateTimeFunction::exprDateTimeNoTimezone), DATETIME, STRING)); } private DefaultFunctionResolver date_add() { - return define(BuiltinFunctionName.DATE_ADD.getName(), + return define( + BuiltinFunctionName.DATE_ADD.getName(), (SerializableFunction>[]) get_date_add_date_sub_signatures(DateTimeFunction::exprAddDateInterval) .toArray(SerializableFunction[]::new)); } private DefaultFunctionResolver date_sub() { - return define(BuiltinFunctionName.DATE_SUB.getName(), + return define( + BuiltinFunctionName.DATE_SUB.getName(), (SerializableFunction>[]) get_date_add_date_sub_signatures(DateTimeFunction::exprSubDateInterval) .toArray(SerializableFunction[]::new)); } - /** - * DAY(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). - */ + /** DAY(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). */ private DefaultFunctionResolver day() { - return define(BuiltinFunctionName.DAY.getName(), + return define( + BuiltinFunctionName.DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING)); } /** - * DAYNAME(STRING/DATE/DATETIME/TIMESTAMP). - * return the name of the weekday for date, including Monday, Tuesday, Wednesday, - * Thursday, Friday, Saturday and Sunday. + * DAYNAME(STRING/DATE/DATETIME/TIMESTAMP). return the name of the weekday for date, including + * Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday. */ private DefaultFunctionResolver dayName() { - return define(BuiltinFunctionName.DAYNAME.getName(), + return define( + BuiltinFunctionName.DAYNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayName), STRING, STRING)); } - /** - * DAYOFMONTH(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). - */ + /** DAYOFMONTH(STRING/DATE/DATETIME/TIMESTAMP). return the day of the month (1-31). 
*/ private DefaultFunctionResolver dayOfMonth(BuiltinFunctionName name) { - return define(name.getName(), - implWithProperties(nullMissingHandlingWithProperties( - (functionProperties, arg) -> DateTimeFunction.dayOfMonthToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + name.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.dayOfMonthToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, STRING), - impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfMonth), INTEGER, TIMESTAMP)); } /** - * DAYOFWEEK(STRING/DATE/DATETIME/TIME/TIMESTAMP). - * return the weekday index for date (1 = Sunday, 2 = Monday, ..., 7 = Saturday). + * DAYOFWEEK(STRING/DATE/DATETIME/TIME/TIMESTAMP). return the weekday index for date (1 = Sunday, + * 2 = Monday, ..., 7 = Saturday). */ private DefaultFunctionResolver dayOfWeek(FunctionName name) { - return define(name, - implWithProperties(nullMissingHandlingWithProperties( - (functionProperties, arg) -> DateTimeFunction.dayOfWeekToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + name, + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.dayOfWeekToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfWeek), INTEGER, STRING)); } - /** - * DAYOFYEAR(STRING/DATE/DATETIME/TIMESTAMP). - * return the day of the year for date (1-366). - */ + /** DAYOFYEAR(STRING/DATE/DATETIME/TIMESTAMP). return the day of the year for date (1-366). 
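For the DAYOFWEEK numbering above (1 = Sunday, ..., 7 = Saturday), a small sketch of the offset against `java.time`'s Monday-based numbering (illustrative class name)::

  import java.time.DayOfWeek;
  import java.time.LocalDate;

  public class DayOfWeekSketch {
    public static void main(String[] args) {
      DayOfWeek dow = LocalDate.of(2023, 5, 7).getDayOfWeek(); // a Sunday
      // java.time numbers Monday=1..Sunday=7; MySQL's DAYOFWEEK numbers Sunday=1..Saturday=7.
      System.out.println(dow.getValue());         // 7
      System.out.println(dow.getValue() % 7 + 1); // 1
    }
  }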
*/ private DefaultFunctionResolver dayOfYear(BuiltinFunctionName dayOfYear) { - return define(dayOfYear.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.dayOfYearToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + dayOfYear.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.dayOfYearToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprDayOfYear), INTEGER, STRING)); } private DefaultFunctionResolver extract() { - return define(BuiltinFunctionName.EXTRACT.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprExtractForTime), - LONG, STRING, TIME), + return define( + BuiltinFunctionName.EXTRACT.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprExtractForTime), + LONG, + STRING, + TIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATE), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprExtract), LONG, STRING, STRING)); } - /** - * FROM_DAYS(LONG). return the date value given the day number N. - */ + /** FROM_DAYS(LONG). return the date value given the day number N. */ private DefaultFunctionResolver from_days() { - return define(BuiltinFunctionName.FROM_DAYS.getName(), + return define( + BuiltinFunctionName.FROM_DAYS.getName(), impl(nullMissingHandling(DateTimeFunction::exprFromDays), DATE, LONG)); } private FunctionResolver from_unixtime() { - return define(BuiltinFunctionName.FROM_UNIXTIME.getName(), + return define( + BuiltinFunctionName.FROM_UNIXTIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprFromUnixTime), DATETIME, DOUBLE), - impl(nullMissingHandling(DateTimeFunction::exprFromUnixTimeFormat), - STRING, DOUBLE, STRING)); + impl( + nullMissingHandling(DateTimeFunction::exprFromUnixTimeFormat), STRING, DOUBLE, STRING)); } private DefaultFunctionResolver get_format() { - return define(BuiltinFunctionName.GET_FORMAT.getName(), - impl(nullMissingHandling(DateTimeFunction::exprGetFormat), STRING, STRING, STRING) - ); + return define( + BuiltinFunctionName.GET_FORMAT.getName(), + impl(nullMissingHandling(DateTimeFunction::exprGetFormat), STRING, STRING, STRING)); } - /** - * HOUR(STRING/TIME/DATETIME/DATE/TIMESTAMP). return the hour value for time. - */ + /** HOUR(STRING/TIME/DATETIME/DATE/TIMESTAMP). return the hour value for time. 
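FROM_UNIXTIME and UNIX_TIMESTAMP above are, at their core, epoch-second conversions; a minimal sketch in UTC (illustrative class name, fractional seconds and the format overload omitted)::

  import java.time.Instant;
  import java.time.LocalDateTime;
  import java.time.ZoneOffset;

  public class UnixTimeSketch {
    public static void main(String[] args) {
      // FROM_UNIXTIME(0) in UTC is the epoch start; UNIX_TIMESTAMP is the reverse mapping.
      LocalDateTime dt = LocalDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC);
      System.out.println(dt);                               // 1970-01-01T00:00
      System.out.println(dt.toEpochSecond(ZoneOffset.UTC)); // 0
    }
  }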
*/ private DefaultFunctionResolver hour(BuiltinFunctionName name) { - return define(name.getName(), + return define( + name.getName(), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprHour), INTEGER, TIMESTAMP)); } - private DefaultFunctionResolver last_day() { - return define(BuiltinFunctionName.LAST_DAY.getName(), + private DefaultFunctionResolver last_day() { + return define( + BuiltinFunctionName.LAST_DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, STRING), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.exprLastDayToday( - functionProperties.getQueryStartClock())), DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.exprLastDayToday(functionProperties.getQueryStartClock())), + DATE, + TIME), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATE), impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprLastDay), DATE, TIMESTAMP)); } private FunctionResolver makedate() { - return define(BuiltinFunctionName.MAKEDATE.getName(), + return define( + BuiltinFunctionName.MAKEDATE.getName(), impl(nullMissingHandling(DateTimeFunction::exprMakeDate), DATE, DOUBLE, DOUBLE)); } private FunctionResolver maketime() { - return define(BuiltinFunctionName.MAKETIME.getName(), + return define( + BuiltinFunctionName.MAKETIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprMakeTime), TIME, DOUBLE, DOUBLE, DOUBLE)); } - /** - * MICROSECOND(STRING/TIME/DATETIME/TIMESTAMP). return the microsecond value for time. - */ + /** MICROSECOND(STRING/TIME/DATETIME/TIMESTAMP). return the microsecond value for time. */ private DefaultFunctionResolver microsecond() { - return define(BuiltinFunctionName.MICROSECOND.getName(), + return define( + BuiltinFunctionName.MICROSECOND.getName(), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprMicrosecond), INTEGER, TIMESTAMP)); } - /** - * MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. - */ + /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. 
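LAST_DAY and MAKEDATE defined above map closely onto `java.time` one-liners; an illustrative sketch (not the project's implementation)::

  import java.time.LocalDate;
  import java.time.temporal.TemporalAdjusters;

  public class LastDayMakeDateSketch {
    public static void main(String[] args) {
      // LAST_DAY: last day of the month containing the argument.
      System.out.println(
          LocalDate.of(2024, 2, 10).with(TemporalAdjusters.lastDayOfMonth())); // 2024-02-29
      // MAKEDATE(year, dayOfYear) corresponds closely to LocalDate.ofYearDay.
      System.out.println(LocalDate.ofYearDay(2023, 100)); // 2023-04-10
    }
  }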
*/ private DefaultFunctionResolver minute(BuiltinFunctionName name) { - return define(name.getName(), + return define( + name.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, DATE), - impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprMinute), INTEGER, TIMESTAMP)); } - /** - * MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. - */ + /** MINUTE(STRING/TIME/DATETIME/TIMESTAMP). return the minute value for time. */ private DefaultFunctionResolver minute_of_day() { - return define(BuiltinFunctionName.MINUTE_OF_DAY.getName(), + return define( + BuiltinFunctionName.MINUTE_OF_DAY.getName(), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprMinuteOfDay), INTEGER, TIMESTAMP)); } - /** - * MONTH(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-12). - */ + /** MONTH(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-12). */ private DefaultFunctionResolver month(BuiltinFunctionName month) { - return define(month.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.monthOfYearToday( - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + month.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.monthOfYearToday(functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprMonth), INTEGER, STRING)); } - /** - * MONTHNAME(STRING/DATE/DATETIME/TIMESTAMP). return the full name of the month for date. - */ + /** MONTHNAME(STRING/DATE/DATETIME/TIMESTAMP). return the full name of the month for date. 
*/ private DefaultFunctionResolver monthName() { - return define(BuiltinFunctionName.MONTHNAME.getName(), + return define( + BuiltinFunctionName.MONTHNAME.getName(), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATE), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprMonthName), STRING, STRING)); } /** @@ -721,130 +803,164 @@ private DefaultFunctionResolver monthName() { * (INTEGER, INTEGER) -> INTEGER */ private DefaultFunctionResolver period_add() { - return define(BuiltinFunctionName.PERIOD_ADD.getName(), - impl(nullMissingHandling(DateTimeFunction::exprPeriodAdd), INTEGER, INTEGER, INTEGER) - ); + return define( + BuiltinFunctionName.PERIOD_ADD.getName(), + impl(nullMissingHandling(DateTimeFunction::exprPeriodAdd), INTEGER, INTEGER, INTEGER)); } /** - * Returns the number of months between periods P1 and P2. - * P1 and P2 should be in the format YYMM or YYYYMM. + * Returns the number of months between periods P1 and P2. P1 and P2 should be in the format YYMM + * or YYYYMM.
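PERIOD_ADD and PERIOD_DIFF operate on YYYYMM periods; a rough sketch of the month arithmetic only (two-digit YYMM handling and validation omitted, class name illustrative)::

  public class PeriodDiffSketch {
    public static void main(String[] args) {
      int p1 = 202305;
      int p2 = 202211;
      // Convert YYYYMM periods to absolute month counts and subtract.
      int months1 = (p1 / 100) * 12 + (p1 % 100);
      int months2 = (p2 / 100) * 12 + (p2 % 100);
      System.out.println(months1 - months2); // 6
    }
  }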
* (INTEGER, INTEGER) -> INTEGER */ private DefaultFunctionResolver period_diff() { - return define(BuiltinFunctionName.PERIOD_DIFF.getName(), - impl(nullMissingHandling(DateTimeFunction::exprPeriodDiff), INTEGER, INTEGER, INTEGER) - ); + return define( + BuiltinFunctionName.PERIOD_DIFF.getName(), + impl(nullMissingHandling(DateTimeFunction::exprPeriodDiff), INTEGER, INTEGER, INTEGER)); } - /** - * QUARTER(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-4). - */ + /** QUARTER(STRING/DATE/DATETIME/TIMESTAMP). return the month for date (1-4). */ private DefaultFunctionResolver quarter() { - return define(BuiltinFunctionName.QUARTER.getName(), + return define( + BuiltinFunctionName.QUARTER.getName(), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprQuarter), INTEGER, STRING)); } private DefaultFunctionResolver sec_to_time() { - return define(BuiltinFunctionName.SEC_TO_TIME.getName(), + return define( + BuiltinFunctionName.SEC_TO_TIME.getName(), impl((nullMissingHandling(DateTimeFunction::exprSecToTime)), TIME, INTEGER), impl((nullMissingHandling(DateTimeFunction::exprSecToTime)), TIME, LONG), impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, DOUBLE), - impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, FLOAT) - ); + impl((nullMissingHandling(DateTimeFunction::exprSecToTimeWithNanos)), TIME, FLOAT)); } - /** - * SECOND(STRING/TIME/DATETIME/TIMESTAMP). return the second value for time. - */ + /** SECOND(STRING/TIME/DATETIME/TIMESTAMP). return the second value for time. */ private DefaultFunctionResolver second(BuiltinFunctionName name) { - return define(name.getName(), + return define( + name.getName(), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, STRING), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIME), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIMESTAMP) - ); + impl(nullMissingHandling(DateTimeFunction::exprSecond), INTEGER, TIMESTAMP)); } private DefaultFunctionResolver subdate() { - return define(BuiltinFunctionName.SUBDATE.getName(), + return define( + BuiltinFunctionName.SUBDATE.getName(), (SerializableFunction>[]) (Stream.concat( - get_date_add_date_sub_signatures(DateTimeFunction::exprSubDateInterval), - get_adddate_subdate_signatures(DateTimeFunction::exprSubDateDays)) + get_date_add_date_sub_signatures(DateTimeFunction::exprSubDateInterval), + get_adddate_subdate_signatures(DateTimeFunction::exprSubDateDays)) .toArray(SerializableFunction[]::new))); } /** - * Subtracts expr2 from expr1 and returns the result. - * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME - * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME - * TODO: MySQL has these signatures too - * (STRING, STRING/TIME) -> STRING // second arg - string with time only - * (x, STRING) -> NULL // second arg - string with timestamp - * (x, STRING/DATE) -> x // second arg - string with date only + * Subtracts expr2 from expr1 and returns the result.
+ * (TIME, TIME/DATE/DATETIME/TIMESTAMP) -> TIME
+ * (DATE/DATETIME/TIMESTAMP, TIME/DATE/DATETIME/TIMESTAMP) -> DATETIME
+ * TODO: MySQL has these signatures too
+ * (STRING, STRING/TIME) -> STRING // second arg - string with time only
+ * (x, STRING) -> NULL // second arg - string with timestamp
+ * (x, STRING/DATE) -> x // second arg - string with date only */ private DefaultFunctionResolver subtime() { - return define(BuiltinFunctionName.SUBTIME.getName(), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - TIME, TIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATETIME, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, DATE, TIMESTAMP), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, TIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, DATETIME), - implWithProperties(nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), - DATETIME, TIMESTAMP, TIMESTAMP) - ); - } - - /** - * Extracts a date, time, or datetime from the given string. - * It accomplishes this using another string which specifies the input format. 
+ return define( + BuiltinFunctionName.SUBTIME.getName(), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), TIME, TIME, DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + TIME, + TIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATETIME, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), DATETIME, DATE, DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATE, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + DATE, + TIMESTAMP), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + TIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + DATETIME), + implWithProperties( + nullMissingHandlingWithProperties(DateTimeFunction::exprSubTime), + DATETIME, + TIMESTAMP, + TIMESTAMP)); + } + + /** + * Extracts a date, time, or datetime from the given string. It accomplishes this using another + * string which specifies the input format. */ private DefaultFunctionResolver str_to_date() { - return define(BuiltinFunctionName.STR_TO_DATE.getName(), + return define( + BuiltinFunctionName.STR_TO_DATE.getName(), implWithProperties( - nullMissingHandlingWithProperties((functionProperties, arg, format) - -> DateTimeFunction.exprStrToDate(functionProperties, arg, format)), - DATETIME, STRING, STRING)); + nullMissingHandlingWithProperties( + (functionProperties, arg, format) -> + DateTimeFunction.exprStrToDate(functionProperties, arg, format)), + DATETIME, + STRING, + STRING)); } /** - * Extracts the time part of a date and time value. - * Also to construct a time type. The supported signatures: - * STRING/DATE/DATETIME/TIME/TIMESTAMP -> TIME + * Extracts the time part of a date and time value. Also to construct a time type. The supported + * signatures: STRING/DATE/DATETIME/TIME/TIMESTAMP -> TIME */ private DefaultFunctionResolver time() { - return define(BuiltinFunctionName.TIME.getName(), + return define( + BuiltinFunctionName.TIME.getName(), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, STRING), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATE), impl(nullMissingHandling(DateTimeFunction::exprTime), TIME, DATETIME), @@ -853,18 +969,19 @@ private DefaultFunctionResolver time() { } /** - * Returns different between two times as a time. 
- * (TIME, TIME) -> TIME - * MySQL has these signatures too - * (DATE, DATE) -> TIME // result is > 24 hours - * (DATETIME, DATETIME) -> TIME // result is > 24 hours - * (TIMESTAMP, TIMESTAMP) -> TIME // result is > 24 hours - * (x, x) -> NULL // when args have different types - * (STRING, STRING) -> TIME // argument strings contain same types only - * (STRING, STRING) -> NULL // argument strings are different types + * Returns different between two times as a time.
+ * (TIME, TIME) -> TIME
+ * MySQL has these signatures too
+ * (DATE, DATE) -> TIME // result is > 24 hours
+ * (DATETIME, DATETIME) -> TIME // result is > 24 hours
+ * (TIMESTAMP, TIMESTAMP) -> TIME // result is > 24 hours
+ * (x, x) -> NULL // when args have different types
+ * (STRING, STRING) -> TIME // argument strings contain same types only
+ * (STRING, STRING) -> NULL // argument strings are different types */ private DefaultFunctionResolver timediff() { - return define(BuiltinFunctionName.TIMEDIFF.getName(), + return define( + BuiltinFunctionName.TIMEDIFF.getName(), impl(nullMissingHandling(DateTimeFunction::exprTimeDiff), TIME, TIME, TIME)); } @@ -872,90 +989,122 @@ private DefaultFunctionResolver timediff() { * TIME_TO_SEC(STRING/TIME/DATETIME/TIMESTAMP). return the time argument, converted to seconds. */ private DefaultFunctionResolver time_to_sec() { - return define(BuiltinFunctionName.TIME_TO_SEC.getName(), + return define( + BuiltinFunctionName.TIME_TO_SEC.getName(), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIME), impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, DATETIME) - ); + impl(nullMissingHandling(DateTimeFunction::exprTimeToSec), LONG, DATETIME)); } /** - * Extracts the timestamp of a date and time value. - * Input strings may contain a timestamp only in format 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]' - * STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP - * STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP + * Extracts the timestamp of a date and time value.
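Returning to TIMEDIFF above ((TIME, TIME) -> TIME), the core computation is a duration re-expressed as a time of day; a minimal sketch (illustrative class name)::

  import java.time.Duration;
  import java.time.LocalTime;

  public class TimeDiffSketch {
    public static void main(String[] args) {
      // The difference between two times, re-expressed as a time of day.
      Duration diff = Duration.between(LocalTime.of(10, 0, 30), LocalTime.of(13, 45, 0));
      System.out.println(LocalTime.MIDNIGHT.plus(diff)); // 03:44:30
    }
  }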
+ * Input strings may contain a timestamp only in the format 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'

+ * STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
+ * STRING/DATE/TIME/DATETIME/TIMESTAMP, STRING/DATE/TIME/DATETIME/TIMESTAMP -> TIMESTAMP
* All types are converted to TIMESTAMP actually before the function call - it is responsibility + *
* of the automatic cast mechanism defined in `ExprCoreType` and performed by `TypeCastOperator`. */ private DefaultFunctionResolver timestamp() { - return define(BuiltinFunctionName.TIMESTAMP.getName(), + return define( + BuiltinFunctionName.TIMESTAMP.getName(), impl(nullMissingHandling(v -> v), TIMESTAMP, TIMESTAMP), // We can use FunctionProperties.None, because it is not used. It is required to convert // TIME to other datetime types, but arguments there are already converted. - impl(nullMissingHandling((v1, v2) -> exprAddTime(FunctionProperties.None, v1, v2)), - TIMESTAMP, TIMESTAMP, TIMESTAMP)); + impl( + nullMissingHandling((v1, v2) -> exprAddTime(FunctionProperties.None, v1, v2)), + TIMESTAMP, + TIMESTAMP, + TIMESTAMP)); } /** - * Adds an interval of time to the provided DATE/DATETIME/TIME/TIMESTAMP/STRING argument. - * The interval of time added is determined by the given first and second arguments. - * The first argument is an interval type, and must be one of the tokens below... - * [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] - * The second argument is the amount of the interval type to be added. - * The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING to add to. + * Adds an interval of time to the provided DATE/DATETIME/TIME/TIMESTAMP/STRING argument. The + * interval of time added is determined by the given first and second arguments. The first + * argument is an interval type, and must be one of the tokens below... [MICROSECOND, SECOND, + * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument is the amount of the + * interval type to be added. The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING to add + * to. + * * @return The DATETIME representing the summed DATE/DATETIME/TIME/TIMESTAMP and interval. */ private DefaultFunctionResolver timestampadd() { - return define(BuiltinFunctionName.TIMESTAMPADD.getName(), - impl(nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, STRING, INTEGER, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprTimestampAdd), - DATETIME, STRING, INTEGER, TIMESTAMP), + return define( + BuiltinFunctionName.TIMESTAMPADD.getName(), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampAdd), + DATETIME, + STRING, + INTEGER, + DATETIME), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampAdd), + DATETIME, + STRING, + INTEGER, + TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties( - (functionProperties, part, amount, time) -> exprTimestampAddForTimeType( - functionProperties.getQueryStartClock(), - part, - amount, - time)), - DATETIME, STRING, INTEGER, TIME)); + (functionProperties, part, amount, time) -> + exprTimestampAddForTimeType( + functionProperties.getQueryStartClock(), part, amount, time)), + DATETIME, + STRING, + INTEGER, + TIME)); } /** - * Finds the difference between provided DATE/DATETIME/TIME/TIMESTAMP/STRING arguments. - * The first argument is an interval type, and must be one of the tokens below... - * [MICROSECOND, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] - * The second argument the DATE/DATETIME/TIME/TIMESTAMP/STRING representing the start time. - * The third argument is the DATE/DATETIME/TIME/TIMESTAMP/STRING representing the end time. + * Finds the difference between provided DATE/DATETIME/TIME/TIMESTAMP/STRING arguments. The first + * argument is an interval type, and must be one of the tokens below... 
[MICROSECOND, SECOND, + * MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR] The second argument the + * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the start time. The third argument is the + * DATE/DATETIME/TIME/TIMESTAMP/STRING representing the end time. + * * @return A LONG representing the difference between arguments, using the given interval type. */ private DefaultFunctionResolver timestampdiff() { - return define(BuiltinFunctionName.TIMESTAMPDIFF.getName(), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, DATETIME, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, DATETIME, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, TIMESTAMP, DATETIME), - impl(nullMissingHandling(DateTimeFunction::exprTimestampDiff), - DATETIME, STRING, TIMESTAMP, TIMESTAMP), + return define( + BuiltinFunctionName.TIMESTAMPDIFF.getName(), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + DATETIME, + DATETIME), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + DATETIME, + TIMESTAMP), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + TIMESTAMP, + DATETIME), + impl( + nullMissingHandling(DateTimeFunction::exprTimestampDiff), + DATETIME, + STRING, + TIMESTAMP, + TIMESTAMP), implWithProperties( nullMissingHandlingWithProperties( - (functionProperties, part, startTime, endTime) -> exprTimestampDiffForTimeType( - functionProperties, - part, - startTime, - endTime)), - DATETIME, STRING, TIME, TIME) - ); + (functionProperties, part, startTime, endTime) -> + exprTimestampDiffForTimeType(functionProperties, part, startTime, endTime)), + DATETIME, + STRING, + TIME, + TIME)); } - /** - * TO_DAYS(STRING/DATE/DATETIME/TIMESTAMP). return the day number of the given date. - */ + /** TO_DAYS(STRING/DATE/DATETIME/TIMESTAMP). return the day number of the given date. */ private DefaultFunctionResolver to_days() { - return define(BuiltinFunctionName.TO_DAYS.getName(), + return define( + BuiltinFunctionName.TO_DAYS.getName(), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, STRING), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprToDays), LONG, DATE), @@ -963,156 +1112,166 @@ private DefaultFunctionResolver to_days() { } /** - * TO_SECONDS(TIMESTAMP/LONG). return the seconds number of the given date. - * Arguments of type STRING/TIMESTAMP/LONG are also accepted. - * STRING/TIMESTAMP/LONG arguments are automatically cast to TIMESTAMP. + * TO_SECONDS(TIMESTAMP/LONG). return the seconds number of the given date. Arguments of type + * STRING/TIMESTAMP/LONG are also accepted. STRING/TIMESTAMP/LONG arguments are automatically cast + * to TIMESTAMP. 
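A sketch of the TIMESTAMPADD / TIMESTAMPDIFF arithmetic described above using `ChronoUnit` (the STRING interval tokens are not parsed here; class name is illustrative)::

  import java.time.LocalDateTime;
  import java.time.temporal.ChronoUnit;

  public class TimestampAddDiffSketch {
    public static void main(String[] args) {
      LocalDateTime start = LocalDateTime.of(2023, 1, 1, 0, 0);
      LocalDateTime end = start.plus(2, ChronoUnit.WEEKS);     // like TIMESTAMPADD(WEEK, 2, start)
      System.out.println(end);                                 // 2023-01-15T00:00
      System.out.println(ChronoUnit.DAYS.between(start, end)); // like TIMESTAMPDIFF(DAY, ...): 14
    }
  }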
*/ private DefaultFunctionResolver to_seconds() { - return define(BuiltinFunctionName.TO_SECONDS.getName(), + return define( + BuiltinFunctionName.TO_SECONDS.getName(), impl(nullMissingHandling(DateTimeFunction::exprToSeconds), LONG, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprToSecondsForIntType), LONG, LONG)); } private FunctionResolver unix_timestamp() { - return define(BuiltinFunctionName.UNIX_TIMESTAMP.getName(), - implWithProperties(functionProperties - -> DateTimeFunction.unixTimeStamp(functionProperties.getQueryStartClock()), LONG), + return define( + BuiltinFunctionName.UNIX_TIMESTAMP.getName(), + implWithProperties( + functionProperties -> + DateTimeFunction.unixTimeStamp(functionProperties.getQueryStartClock()), + LONG), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATE), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DATETIME), impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DOUBLE) - ); + impl(nullMissingHandling(DateTimeFunction::unixTimeStampOf), DOUBLE, DOUBLE)); } - /** - * UTC_DATE(). return the current UTC Date in format yyyy-MM-dd - */ + /** UTC_DATE(). return the current UTC Date in format yyyy-MM-dd */ private DefaultFunctionResolver utc_date() { - return define(BuiltinFunctionName.UTC_DATE.getName(), - implWithProperties(functionProperties - -> exprUtcDate(functionProperties), DATE)); + return define( + BuiltinFunctionName.UTC_DATE.getName(), + implWithProperties(functionProperties -> exprUtcDate(functionProperties), DATE)); } - /** - * UTC_TIME(). return the current UTC Time in format HH:mm:ss - */ + /** UTC_TIME(). return the current UTC Time in format HH:mm:ss */ private DefaultFunctionResolver utc_time() { - return define(BuiltinFunctionName.UTC_TIME.getName(), - implWithProperties(functionProperties - -> exprUtcTime(functionProperties), TIME)); + return define( + BuiltinFunctionName.UTC_TIME.getName(), + implWithProperties(functionProperties -> exprUtcTime(functionProperties), TIME)); } - /** - * UTC_TIMESTAMP(). return the current UTC TimeStamp in format yyyy-MM-dd HH:mm:ss - */ + /** UTC_TIMESTAMP(). return the current UTC TimeStamp in format yyyy-MM-dd HH:mm:ss */ private DefaultFunctionResolver utc_timestamp() { - return define(BuiltinFunctionName.UTC_TIMESTAMP.getName(), - implWithProperties(functionProperties - -> exprUtcTimeStamp(functionProperties), DATETIME)); + return define( + BuiltinFunctionName.UTC_TIMESTAMP.getName(), + implWithProperties(functionProperties -> exprUtcTimeStamp(functionProperties), DATETIME)); } - /** - * WEEK(DATE[,mode]). return the week number for date. - */ + /** WEEK(DATE[,mode]). return the week number for date. 
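The WEEK modes used below (and the %U/%u/%V/%v specifiers earlier in this diff) differ in the first day of the week and the minimal-days rule; `java.time.temporal.WeekFields` can illustrate the effect, although it is not the project's CalendarLookup and does not reproduce every mode exactly::

  import java.time.LocalDate;
  import java.time.temporal.WeekFields;

  public class WeekModeSketch {
    public static void main(String[] args) {
      LocalDate d = LocalDate.of(2023, 1, 1); // a Sunday
      // Sunday-start counting places Jan 1 in week 1; Monday-start ISO-style counting does not.
      System.out.println(d.get(WeekFields.SUNDAY_START.weekOfYear())); // 1
      System.out.println(d.get(WeekFields.ISO.weekOfYear()));          // 0
    }
  }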
*/ private DefaultFunctionResolver week(BuiltinFunctionName week) { - return define(week.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> DateTimeFunction.weekOfYearToday( - DEFAULT_WEEK_OF_YEAR_MODE, - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + week.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + DateTimeFunction.weekOfYearToday( + DEFAULT_WEEK_OF_YEAR_MODE, functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprWeekWithoutMode), INTEGER, STRING), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, time, modeArg) - -> DateTimeFunction.weekOfYearToday( - modeArg, - functionProperties.getQueryStartClock())), INTEGER, TIME, INTEGER), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, time, modeArg) -> + DateTimeFunction.weekOfYearToday( + modeArg, functionProperties.getQueryStartClock())), + INTEGER, + TIME, + INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATE, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, TIMESTAMP, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, STRING, INTEGER) - ); + impl(nullMissingHandling(DateTimeFunction::exprWeek), INTEGER, STRING, INTEGER)); } private DefaultFunctionResolver weekday() { - return define(BuiltinFunctionName.WEEKDAY.getName(), - implWithProperties(nullMissingHandlingWithProperties( - (functionProperties, arg) -> new ExprIntegerValue( - formatNow(functionProperties.getQueryStartClock()).getDayOfWeek().getValue() - 1)), - INTEGER, TIME), + return define( + BuiltinFunctionName.WEEKDAY.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + new ExprIntegerValue( + formatNow(functionProperties.getQueryStartClock()).getDayOfWeek().getValue() + - 1)), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprWeekday), INTEGER, STRING)); } - /** - * YEAR(STRING/DATE/DATETIME/TIMESTAMP). return the year for date (1000-9999). - */ + /** YEAR(STRING/DATE/DATETIME/TIMESTAMP). return the year for date (1000-9999). */ private DefaultFunctionResolver year() { - return define(BuiltinFunctionName.YEAR.getName(), + return define( + BuiltinFunctionName.YEAR.getName(), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, TIMESTAMP), - impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, STRING) - ); + impl(nullMissingHandling(DateTimeFunction::exprYear), INTEGER, STRING)); } - /** - * YEARWEEK(DATE[,mode]). return the week number for date. - */ + /** YEARWEEK(DATE[,mode]). 
return the week number for date. */ private DefaultFunctionResolver yearweek() { - return define(BuiltinFunctionName.YEARWEEK.getName(), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, arg) - -> yearweekToday( - DEFAULT_WEEK_OF_YEAR_MODE, - functionProperties.getQueryStartClock())), INTEGER, TIME), + return define( + BuiltinFunctionName.YEARWEEK.getName(), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, arg) -> + yearweekToday( + DEFAULT_WEEK_OF_YEAR_MODE, functionProperties.getQueryStartClock())), + INTEGER, + TIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATE), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, DATETIME), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, TIMESTAMP), impl(nullMissingHandling(DateTimeFunction::exprYearweekWithoutMode), INTEGER, STRING), - implWithProperties(nullMissingHandlingWithProperties((functionProperties, time, modeArg) - -> yearweekToday( - modeArg, - functionProperties.getQueryStartClock())), INTEGER, TIME, INTEGER), + implWithProperties( + nullMissingHandlingWithProperties( + (functionProperties, time, modeArg) -> + yearweekToday(modeArg, functionProperties.getQueryStartClock())), + INTEGER, + TIME, + INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATE, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, DATETIME, INTEGER), impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, TIMESTAMP, INTEGER), - impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, STRING, INTEGER) - ); + impl(nullMissingHandling(DateTimeFunction::exprYearweek), INTEGER, STRING, INTEGER)); } /** - * Formats date according to format specifier. First argument is date, second is format. - * Detailed supported signatures: - * (STRING, STRING) -> STRING - * (DATE, STRING) -> STRING - * (DATETIME, STRING) -> STRING - * (TIME, STRING) -> STRING + * Formats date according to format specifier. First argument is date, second is format.
+ * Detailed supported signatures:
+ * (STRING, STRING) -> STRING
+ * (DATE, STRING) -> STRING
+ * (DATETIME, STRING) -> STRING
+ * (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ private DefaultFunctionResolver date_format() { - return define(BuiltinFunctionName.DATE_FORMAT.getName(), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, STRING, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, DATE, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, DATETIME, STRING), + return define( + BuiltinFunctionName.DATE_FORMAT.getName(), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, STRING, STRING), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATE, STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), STRING, DATETIME, STRING), implWithProperties( nullMissingHandlingWithProperties( - (functionProperties, time, formatString) - -> DateTimeFormatterUtil.getFormattedDateOfToday( + (functionProperties, time, formatString) -> + DateTimeFormatterUtil.getFormattedDateOfToday( formatString, time, functionProperties.getQueryStartClock())), - STRING, TIME, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), - STRING, TIMESTAMP, STRING) - ); + STRING, + TIME, + STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedDate), + STRING, + TIMESTAMP, + STRING)); } - private ExprValue dayOfMonthToday(Clock clock) { return new ExprIntegerValue(LocalDateTime.now(clock).getDayOfMonth()); } @@ -1144,8 +1303,8 @@ private ExprValue dayOfWeekToday(Clock clock) { * @param interval ExprValue of Interval type, the temporal amount to add. * @return Datetime resulted from `interval` added to `datetime`. */ - private ExprValue exprAddDateInterval(FunctionProperties functionProperties, - ExprValue datetime, ExprValue interval) { + private ExprValue exprAddDateInterval( + FunctionProperties functionProperties, ExprValue datetime, ExprValue interval) { return exprDateApplyInterval(functionProperties, datetime, interval.intervalValue(), true); } @@ -1158,36 +1317,37 @@ private ExprValue exprAddDateInterval(FunctionProperties functionProperties, * @param isAdd A flag: true to isAdd, false to subtract. * @return Datetime calculated. */ - private ExprValue exprDateApplyInterval(FunctionProperties functionProperties, - ExprValue datetime, - TemporalAmount interval, - Boolean isAdd) { + private ExprValue exprDateApplyInterval( + FunctionProperties functionProperties, + ExprValue datetime, + TemporalAmount interval, + Boolean isAdd) { var dt = extractDateTime(datetime, functionProperties); return new ExprDatetimeValue(isAdd ? dt.plus(interval) : dt.minus(interval)); } - + /** - * Formats date according to format specifier. First argument is time, second is format. - * Detailed supported signatures: - * (STRING, STRING) -> STRING - * (DATE, STRING) -> STRING - * (DATETIME, STRING) -> STRING - * (TIME, STRING) -> STRING + * Formats date according to format specifier. First argument is time, second is format.
+ * Detailed supported signatures:
+ * (STRING, STRING) -> STRING
+ * (DATE, STRING) -> STRING
+ * (DATETIME, STRING) -> STRING
+ * (TIME, STRING) -> STRING
* (TIMESTAMP, STRING) -> STRING */ private DefaultFunctionResolver time_format() { - return define(BuiltinFunctionName.TIME_FORMAT.getName(), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, STRING, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, DATE, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, DATETIME, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, TIME, STRING), - impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), - STRING, TIMESTAMP, STRING) - ); + return define( + BuiltinFunctionName.TIME_FORMAT.getName(), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, STRING, STRING), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATE, STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, DATETIME, STRING), + impl(nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), STRING, TIME, STRING), + impl( + nullMissingHandling(DateTimeFormatterUtil::getFormattedTime), + STRING, + TIMESTAMP, + STRING)); } /** @@ -1198,8 +1358,8 @@ private DefaultFunctionResolver time_format() { * @param days ExprValue of Long type, representing the number of days to add. * @return Date/Datetime resulted from days added to `datetime`. */ - private ExprValue exprAddDateDays(FunctionProperties functionProperties, - ExprValue datetime, ExprValue days) { + private ExprValue exprAddDateDays( + FunctionProperties functionProperties, ExprValue datetime, ExprValue days) { return exprDateApplyDays(functionProperties, datetime, days.longValue(), true); } @@ -1212,11 +1372,11 @@ private ExprValue exprAddDateDays(FunctionProperties functionProperties, * @param isAdd A flag: true to add, false to subtract. * @return Datetime calculated. */ - private ExprValue exprDateApplyDays(FunctionProperties functionProperties, - ExprValue datetime, Long days, Boolean isAdd) { + private ExprValue exprDateApplyDays( + FunctionProperties functionProperties, ExprValue datetime, Long days, Boolean isAdd) { if (datetime.type() == DATE) { - return new ExprDateValue(isAdd ? datetime.dateValue().plusDays(days) - : datetime.dateValue().minusDays(days)); + return new ExprDateValue( + isAdd ? datetime.dateValue().plusDays(days) : datetime.dateValue().minusDays(days)); } var dt = extractDateTime(datetime, functionProperties); return new ExprDatetimeValue(isAdd ? dt.plusDays(days) : dt.minusDays(days)); @@ -1231,12 +1391,16 @@ private ExprValue exprDateApplyDays(FunctionProperties functionProperties, * @param isAdd A flag: true to add, false to subtract. * @return A value calculated. */ - private ExprValue exprApplyTime(FunctionProperties functionProperties, - ExprValue temporal, ExprValue temporalDelta, Boolean isAdd) { + private ExprValue exprApplyTime( + FunctionProperties functionProperties, + ExprValue temporal, + ExprValue temporalDelta, + Boolean isAdd) { var interval = Duration.between(LocalTime.MIN, temporalDelta.timeValue()); - var result = isAdd - ? extractDateTime(temporal, functionProperties).plus(interval) - : extractDateTime(temporal, functionProperties).minus(interval); + var result = + isAdd + ? extractDateTime(temporal, functionProperties).plus(interval) + : extractDateTime(temporal, functionProperties).minus(interval); return temporal.type() == TIME ? 
new ExprTimeValue(result.toLocalTime()) : new ExprDatetimeValue(result); @@ -1250,18 +1414,18 @@ private ExprValue exprApplyTime(FunctionProperties functionProperties, * @param temporalDelta A Date/Time/Datetime/Timestamp object to add time from. * @return A value calculated. */ - private ExprValue exprAddTime(FunctionProperties functionProperties, - ExprValue temporal, ExprValue temporalDelta) { + private ExprValue exprAddTime( + FunctionProperties functionProperties, ExprValue temporal, ExprValue temporalDelta) { return exprApplyTime(functionProperties, temporal, temporalDelta, true); } /** - * CONVERT_TZ function implementation for ExprValue. - * Returns null for time zones outside of +13:00 and -12:00. + * CONVERT_TZ function implementation for ExprValue. Returns null for time zones outside of +13:00 + * and -12:00. * * @param startingDateTime ExprValue of DateTime that is being converted from - * @param fromTz ExprValue of time zone, representing the time to convert from. - * @param toTz ExprValue of time zone, representing the time to convert to. + * @param fromTz ExprValue of time zone, representing the time to convert from. + * @param toTz ExprValue of time zone, representing the time to convert to. * @return DateTime that has been converted to the to_tz timezone. */ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, ExprValue toTz) { @@ -1278,8 +1442,7 @@ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, Ex || !DateTimeUtils.isValidMySqlTimeZoneId(convertedToTz)) { return ExprNullValue.of(); } - ZonedDateTime zonedDateTime = - startingDateTime.datetimeValue().atZone(convertedFromTz); + ZonedDateTime zonedDateTime = startingDateTime.datetimeValue().atZone(convertedFromTz); return new ExprDatetimeValue( zonedDateTime.withZoneSameInstant(convertedToTz).toLocalDateTime()); @@ -1305,19 +1468,19 @@ private ExprValue exprDate(ExprValue exprValue) { } /** - * Calculate the value in days from one date to the other. - * Only the date parts of the values are used in the calculation. + * Calculate the value in days from one date to the other. Only the date parts of the values are + * used in the calculation. * * @param first The first value. * @param second The second value. * @return The diff. 
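Hand-checked illustration (editorial, not part of the change): because DAYS.between is called with the operands swapped, DATEDIFF returns the first date minus the second, using only the date parts of the arguments.

    long diff = DAYS.between(LocalDate.of(2001, 1, 1), LocalDate.of(2001, 2, 1));
    // 31, i.e. DATEDIFF('2001-02-01', '2001-01-01') = 31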
*/ - private ExprValue exprDateDiff(FunctionProperties functionProperties, - ExprValue first, ExprValue second) { + private ExprValue exprDateDiff( + FunctionProperties functionProperties, ExprValue first, ExprValue second) { // java inverses the value, so we have to swap 1 and 2 - return new ExprLongValue(DAYS.between( - extractDate(second, functionProperties), - extractDate(first, functionProperties))); + return new ExprLongValue( + DAYS.between( + extractDate(second, functionProperties), extractDate(first, functionProperties))); } /** @@ -1357,10 +1520,7 @@ private ExprValue exprDateTime(ExprValue dateTime, ExprValue timeZone) { ldt = new ExprDatetimeValue(dateTime.stringValue()); toTz = defaultTimeZone; } - convertTZResult = exprConvertTZ( - ldt, - new ExprStringValue(toTz), - timeZone); + convertTZResult = exprConvertTZ(ldt, new ExprStringValue(toTz), timeZone); return convertTZResult; } @@ -1426,8 +1586,8 @@ private ExprValue exprDayOfYear(ExprValue date) { public ExprLongValue formatExtractFunction(ExprValue part, ExprValue datetime) { String partName = part.stringValue().toUpperCase(); LocalDateTime arg = datetime.datetimeValue(); - String text = arg.format(DateTimeFormatter.ofPattern( - extract_formats.get(partName), Locale.ENGLISH)); + String text = + arg.format(DateTimeFormatter.ofPattern(extract_formats.get(partName), Locale.ENGLISH)); return new ExprLongValue(Long.parseLong(text)); } @@ -1450,12 +1610,10 @@ private ExprValue exprExtract(ExprValue part, ExprValue datetime) { * @param time The time to be formatted. * @return A LONG */ - private ExprValue exprExtractForTime(FunctionProperties functionProperties, - ExprValue part, - ExprValue time) { + private ExprValue exprExtractForTime( + FunctionProperties functionProperties, ExprValue part, ExprValue time) { return formatExtractFunction( - part, - new ExprDatetimeValue(extractDateTime(time, functionProperties))); + part, new ExprDatetimeValue(extractDateTime(time, functionProperties))); } /** @@ -1484,9 +1642,8 @@ private ExprValue exprFromUnixTime(ExprValue time) { private LocalDateTime exprFromUnixTimeImpl(ExprValue time) { return LocalDateTime.ofInstant( - Instant.ofEpochSecond((long)Math.floor(time.doubleValue())), - UTC_ZONE_ID) - .withNano((int)((time.doubleValue() % 1) * 1E9)); + Instant.ofEpochSecond((long) Math.floor(time.doubleValue())), UTC_ZONE_ID) + .withNano((int) ((time.doubleValue() % 1) * 1E9)); } private ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { @@ -1506,9 +1663,8 @@ private ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { */ private ExprValue exprGetFormat(ExprValue type, ExprValue format) { if (formats.contains(type.stringValue().toLowerCase(), format.stringValue().toLowerCase())) { - return new ExprStringValue(formats.get( - type.stringValue().toLowerCase(), - format.stringValue().toLowerCase())); + return new ExprStringValue( + formats.get(type.stringValue().toLowerCase(), format.stringValue().toLowerCase())); } return ExprNullValue.of(); @@ -1521,8 +1677,7 @@ private ExprValue exprGetFormat(ExprValue type, ExprValue format) { * @return ExprValue. 
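Illustrative sketch (not part of the change): the hour is counted as whole hours elapsed since midnight, exactly what the HOURS.between call below computes.

    long hour = HOURS.between(LocalTime.MIN, LocalTime.of(14, 30));
    // 14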
*/ private ExprValue exprHour(ExprValue time) { - return new ExprIntegerValue( - HOURS.between(LocalTime.MIN, time.timeValue())); + return new ExprIntegerValue(HOURS.between(LocalTime.MIN, time.timeValue())); } /** @@ -1533,9 +1688,7 @@ private ExprValue exprHour(ExprValue time) { */ private LocalDate getLastDay(LocalDate today) { return LocalDate.of( - today.getYear(), - today.getMonth(), - today.getMonth().length(today.isLeapYear())); + today.getYear(), today.getMonth(), today.getMonth().length(today.isLeapYear())); } /** @@ -1559,12 +1712,15 @@ private ExprValue exprLastDayToday(Clock clock) { } /** - * Following MySQL, function receives arguments of type double and rounds them before use. - * Furthermore: - * - zero year interpreted as 2000 - * - negative year is not accepted - * - @dayOfYear should be greater than 1 - * - if @dayOfYear is greater than 365/366, calculation goes to the next year(s) + * Following MySQL, function receives arguments of type double and rounds them before use.
+ * Furthermore:
+ * + *
    + *
  • zero year interpreted as 2000 + *
  • negative year is not accepted + *
  • @dayOfYear should be greater than 1 + *
  • if @dayOfYear is greater than 365/366, calculation goes to the next year(s) + *
* * @param yearExpr year * @param dayOfYearExp day of the @year, starting from 1 @@ -1580,12 +1736,13 @@ private ExprValue exprMakeDate(ExprValue yearExpr, ExprValue dayOfYearExp) { if (0 == year) { year = 2000; } - return new ExprDateValue(LocalDate.ofYearDay((int)year, 1).plusDays(dayOfYear - 1)); + return new ExprDateValue(LocalDate.ofYearDay((int) year, 1).plusDays(dayOfYear - 1)); } /** * Following MySQL, function receives arguments of type double. @hour and @minute are rounded, * while @second used as is, including fraction part. + * * @param hourExpr hour * @param minuteExpr minute * @param secondExpr second @@ -1598,8 +1755,9 @@ private ExprValue exprMakeTime(ExprValue hourExpr, ExprValue minuteExpr, ExprVal if (0 > hour || 0 > minute || 0 > second) { return ExprNullValue.of(); } - return new ExprTimeValue(LocalTime.parse(String.format("%02d:%02d:%012.9f", - hour, minute, second), DateTimeFormatter.ISO_TIME)); + return new ExprTimeValue( + LocalTime.parse( + String.format("%02d:%02d:%012.9f", hour, minute, second), DateTimeFormatter.ISO_TIME)); } /** @@ -1620,8 +1778,7 @@ private ExprValue exprMicrosecond(ExprValue time) { * @return ExprValue. */ private ExprValue exprMinute(ExprValue time) { - return new ExprIntegerValue( - (MINUTES.between(LocalTime.MIN, time.timeValue()) % 60)); + return new ExprIntegerValue((MINUTES.between(LocalTime.MIN, time.timeValue()) % 60)); } /** @@ -1631,8 +1788,7 @@ private ExprValue exprMinute(ExprValue time) { * @return ExprValue. */ private ExprValue exprMinuteOfDay(ExprValue time) { - return new ExprIntegerValue( - MINUTES.between(LocalTime.MIN, time.timeValue())); + return new ExprIntegerValue(MINUTES.between(LocalTime.MIN, time.timeValue())); } /** @@ -1675,8 +1831,7 @@ private LocalDate parseDatePeriod(Integer period) { } /** - * Adds N months to period P (in the format YYMM or YYYYMM). - * Returns a value in the format YYYYMM. + * Adds N months to period P (in the format YYMM or YYYYMM). Returns a value in the format YYYYMM. * * @param period Period in the format YYMM or YYYYMM. * @param months Amount of months to add. @@ -1684,19 +1839,20 @@ private LocalDate parseDatePeriod(Integer period) { */ private ExprValue exprPeriodAdd(ExprValue period, ExprValue months) { // We should add a day to make string parsable and remove it afterwards - var input = period.integerValue() * 100 + 1; // adds 01 to end of the string + var input = period.integerValue() * 100 + 1; // adds 01 to end of the string var parsedDate = parseDatePeriod(input); if (parsedDate == null) { return ExprNullValue.of(); } var res = DATE_FORMATTER_LONG_YEAR.format(parsedDate.plusMonths(months.integerValue())); - return new ExprIntegerValue(Integer.parseInt( - res.substring(0, res.length() - 2))); // Remove the day part, .eg. 20070101 -> 200701 + return new ExprIntegerValue( + Integer.parseInt( + res.substring(0, res.length() - 2))); // Remove the day part, .eg. 20070101 -> 200701 } /** - * Returns the number of months between periods P1 and P2. - * P1 and P2 should be in the format YYMM or YYYYMM. + * Returns the number of months between periods P1 and P2. P1 and P2 should be in the format YYMM + * or YYYYMM. * * @param period1 Period in the format YYMM or YYYYMM. * @param period2 Period in the format YYMM or YYYYMM. @@ -1724,6 +1880,7 @@ private ExprValue exprQuarter(ExprValue date) { /** * Returns TIME value of sec_to_time function for an INTEGER or LONG arguments. 
+ * * @param totalSeconds The total number of seconds * @return A TIME value */ @@ -1732,17 +1889,17 @@ private ExprValue exprSecToTime(ExprValue totalSeconds) { } /** - * Helper function which obtains the decimal portion of the seconds value passed in. - * Uses BigDecimal to prevent issues with math on floating point numbers. - * Return is formatted to be used with Duration.ofSeconds(); + * Helper function which obtains the decimal portion of the seconds value passed in. Uses + * BigDecimal to prevent issues with math on floating point numbers. Return is formatted to be + * used with Duration.ofSeconds(); * * @param seconds and ExprDoubleValue or ExprFloatValue for the seconds * @return A LONG representing the nanoseconds portion */ private long formatNanos(ExprValue seconds) { - //Convert ExprValue to BigDecimal + // Convert ExprValue to BigDecimal BigDecimal formattedNanos = BigDecimal.valueOf(seconds.doubleValue()); - //Extract only the nanosecond part + // Extract only the nanosecond part formattedNanos = formattedNanos.subtract(BigDecimal.valueOf(formattedNanos.intValue())); return formattedNanos.scaleByPowerOfTen(9).longValue(); @@ -1750,6 +1907,7 @@ private long formatNanos(ExprValue seconds) { /** * Returns TIME value of sec_to_time function for FLOAT or DOUBLE arguments. + * * @param totalSeconds The total number of seconds * @return A TIME value */ @@ -1767,8 +1925,7 @@ private ExprValue exprSecToTimeWithNanos(ExprValue totalSeconds) { * @return ExprValue. */ private ExprValue exprSecond(ExprValue time) { - return new ExprIntegerValue( - (SECONDS.between(LocalTime.MIN, time.timeValue()) % 60)); + return new ExprIntegerValue((SECONDS.between(LocalTime.MIN, time.timeValue()) % 60)); } /** @@ -1779,8 +1936,8 @@ private ExprValue exprSecond(ExprValue time) { * @param days ExprValue of Long type, representing the number of days to subtract. * @return Date/Datetime resulted from days subtracted to date. */ - private ExprValue exprSubDateDays(FunctionProperties functionProperties, - ExprValue date, ExprValue days) { + private ExprValue exprSubDateDays( + FunctionProperties functionProperties, ExprValue date, ExprValue days) { return exprDateApplyDays(functionProperties, date, days.longValue(), false); } @@ -1792,8 +1949,8 @@ private ExprValue exprSubDateDays(FunctionProperties functionProperties, * @param expr ExprValue of Interval type, the temporal amount to subtract. * @return Datetime resulted from expr subtracted to `datetime`. */ - private ExprValue exprSubDateInterval(FunctionProperties functionProperties, - ExprValue datetime, ExprValue expr) { + private ExprValue exprSubDateInterval( + FunctionProperties functionProperties, ExprValue datetime, ExprValue expr) { return exprDateApplyInterval(functionProperties, datetime, expr.intervalValue(), false); } @@ -1804,14 +1961,13 @@ private ExprValue exprSubDateInterval(FunctionProperties functionProperties, * @param temporalDelta A Date/Time/Datetime/Timestamp to subtract time from. * @return A value calculated. 
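Illustrative sketch (editorial, not from the diff): as in exprApplyTime above, the second argument is read as a duration measured from 00:00:00 and, for SUBTIME, subtracted from the first argument.

    Duration delta = Duration.between(LocalTime.MIN, LocalTime.of(1, 30)); // PT1H30M
    LocalDateTime result = LocalDateTime.of(2000, 1, 1, 12, 0).minus(delta); // 2000-01-01T10:30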
*/ - private ExprValue exprSubTime(FunctionProperties functionProperties, - ExprValue temporal, ExprValue temporalDelta) { + private ExprValue exprSubTime( + FunctionProperties functionProperties, ExprValue temporal, ExprValue temporalDelta) { return exprApplyTime(functionProperties, temporal, temporalDelta, false); } - private ExprValue exprStrToDate(FunctionProperties fp, - ExprValue dateTimeExpr, - ExprValue formatStringExp) { + private ExprValue exprStrToDate( + FunctionProperties fp, ExprValue dateTimeExpr, ExprValue formatStringExp) { return DateTimeFormatterUtil.parseStringWithDateOrTime(fp, dateTimeExpr, formatStringExp); } @@ -1838,8 +1994,8 @@ private ExprValue exprTime(ExprValue exprValue) { */ private ExprValue exprTimeDiff(ExprValue first, ExprValue second) { // java inverses the value, so we have to swap 1 and 2 - return new ExprTimeValue(LocalTime.MIN.plus( - Duration.between(second.timeValue(), first.timeValue()))); + return new ExprTimeValue( + LocalTime.MIN.plus(Duration.between(second.timeValue(), first.timeValue()))); } /** @@ -1852,9 +2008,8 @@ private ExprValue exprTimeToSec(ExprValue time) { return new ExprLongValue(time.timeValue().toSecondOfDay()); } - private ExprValue exprTimestampAdd(ExprValue partExpr, - ExprValue amountExpr, - ExprValue datetimeExpr) { + private ExprValue exprTimestampAdd( + ExprValue partExpr, ExprValue amountExpr, ExprValue datetimeExpr) { String part = partExpr.stringValue(); int amount = amountExpr.integerValue(); LocalDateTime datetime = datetimeExpr.datetimeValue(); @@ -1895,13 +2050,9 @@ private ExprValue exprTimestampAdd(ExprValue partExpr, return new ExprDatetimeValue(datetime.plus(amount, temporalUnit)); } - private ExprValue exprTimestampAddForTimeType(Clock clock, - ExprValue partExpr, - ExprValue amountExpr, - ExprValue timeExpr) { - LocalDateTime datetime = LocalDateTime.of( - formatNow(clock).toLocalDate(), - timeExpr.timeValue()); + private ExprValue exprTimestampAddForTimeType( + Clock clock, ExprValue partExpr, ExprValue amountExpr, ExprValue timeExpr) { + LocalDateTime datetime = LocalDateTime.of(formatNow(clock).toLocalDate(), timeExpr.timeValue()); return exprTimestampAdd(partExpr, amountExpr, new ExprDatetimeValue(datetime)); } @@ -1942,19 +2093,13 @@ private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalD } private ExprValue exprTimestampDiff( - ExprValue partExpr, - ExprValue startTimeExpr, - ExprValue endTimeExpr) { + ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( - partExpr.stringValue(), - startTimeExpr.datetimeValue(), - endTimeExpr.datetimeValue()); + partExpr.stringValue(), startTimeExpr.datetimeValue(), endTimeExpr.datetimeValue()); } - private ExprValue exprTimestampDiffForTimeType(FunctionProperties fp, - ExprValue partExpr, - ExprValue startTimeExpr, - ExprValue endTimeExpr) { + private ExprValue exprTimestampDiffForTimeType( + FunctionProperties fp, ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( partExpr.stringValue(), extractDateTime(startTimeExpr, fp), @@ -1988,8 +2133,8 @@ private ExprValue exprUtcTime(FunctionProperties functionProperties) { * @return ExprValue. 
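Hand-checked illustration (not part of the change): UTC_TIMESTAMP reads the query-start clock and normalises it to UTC; with a fixed clock the result is easy to verify by hand.

    Clock clock = Clock.fixed(Instant.parse("2023-03-01T07:30:00Z"), ZoneOffset.ofHours(2));
    LocalDateTime utc =
        ZonedDateTime.now(clock).withZoneSameInstant(UTC_ZONE_ID).toLocalDateTime();
    // 2023-03-01T07:30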
*/ private ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { - var zdt = ZonedDateTime.now(functionProperties.getQueryStartClock()) - .withZoneSameInstant(UTC_ZONE_ID); + var zdt = + ZonedDateTime.now(functionProperties.getQueryStartClock()).withZoneSameInstant(UTC_ZONE_ID); return new ExprDatetimeValue(zdt.toLocalDateTime()); } @@ -2027,25 +2172,25 @@ private DateTimeFormatter getFormatter(int dateAsInt) { throw new DateTimeException("Integer argument was out of range"); } - //Check below from YYYYMMDD - MMDD which format should be used + // Check below from YYYYMMDD - MMDD which format should be used switch (length) { - //Check if dateAsInt is at least 8 digits long + // Check if dateAsInt is at least 8 digits long case FULL_DATE_LENGTH: return DATE_FORMATTER_LONG_YEAR; - //Check if dateAsInt is at least 6 digits long + // Check if dateAsInt is at least 6 digits long case SHORT_DATE_LENGTH: return DATE_FORMATTER_SHORT_YEAR; - //Check if dateAsInt is at least 5 digits long + // Check if dateAsInt is at least 5 digits long case SINGLE_DIGIT_YEAR_DATE_LENGTH: return DATE_FORMATTER_SINGLE_DIGIT_YEAR; - //Check if dateAsInt is at least 4 digits long + // Check if dateAsInt is at least 4 digits long case NO_YEAR_DATE_LENGTH: return DATE_FORMATTER_NO_YEAR; - //Check if dateAsInt is at least 3 digits long + // Check if dateAsInt is at least 3 digits long case SINGLE_DIGIT_MONTH_DATE_LENGTH: return DATE_FORMATTER_SINGLE_DIGIT_MONTH; @@ -2064,15 +2209,16 @@ private DateTimeFormatter getFormatter(int dateAsInt) { */ private ExprValue exprToSecondsForIntType(ExprValue dateExpr) { try { - //Attempt to parse integer argument as date - LocalDate date = LocalDate.parse(String.valueOf(dateExpr.integerValue()), - getFormatter(dateExpr.integerValue())); + // Attempt to parse integer argument as date + LocalDate date = + LocalDate.parse( + String.valueOf(dateExpr.integerValue()), getFormatter(dateExpr.integerValue())); - return new ExprLongValue(date.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) - + DAYS_0000_TO_1970 * SECONDS_PER_DAY); + return new ExprLongValue( + date.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + DAYS_0000_TO_1970 * SECONDS_PER_DAY); } catch (DateTimeException ignored) { - //Return null if parsing error + // Return null if parsing error return ExprNullValue.of(); } } @@ -2121,12 +2267,14 @@ private ExprValue unixTimeStampOf(ExprValue value) { private Double unixTimeStampOfImpl(ExprValue value) { // Also, according to MySQL documentation: // The date argument may be a DATE, DATETIME, or TIMESTAMP ... - switch ((ExprCoreType)value.type()) { - case DATE: return value.dateValue().toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; - case DATETIME: return value.datetimeValue().toEpochSecond(ZoneOffset.UTC) - + value.datetimeValue().getNano() / 1E9; - case TIMESTAMP: return value.timestampValue().getEpochSecond() - + value.timestampValue().getNano() / 1E9; + switch ((ExprCoreType) value.type()) { + case DATE: + return value.dateValue().toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; + case DATETIME: + return value.datetimeValue().toEpochSecond(ZoneOffset.UTC) + + value.datetimeValue().getNano() / 1E9; + case TIMESTAMP: + return value.timestampValue().getEpochSecond() + value.timestampValue().getNano() / 1E9; default: // ... or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. 
// If the argument includes a time part, it may optionally include a fractional @@ -2172,8 +2320,8 @@ private Double unixTimeStampOfImpl(ExprValue value) { } /** - * Week for date implementation for ExprValue. - * When mode is not specified default value mode 0 is used for default_week_format. + * Week for date implementation for ExprValue. When mode is not specified default value mode 0 is + * used for default_week_format. * * @param date ExprValue of Date/Datetime/Timestamp/String type. * @return ExprValue. @@ -2203,12 +2351,11 @@ private ExprIntegerValue extractYearweek(LocalDate date, int mode) { // Needed to align with MySQL. Due to how modes for this function work. // See description of modes here ... // https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_week - int modeJava = CalendarLookup.getWeekNumber(mode, date) != 0 ? mode : - mode <= 4 ? 2 : - 7; + int modeJava = CalendarLookup.getWeekNumber(mode, date) != 0 ? mode : mode <= 4 ? 2 : 7; - int formatted = CalendarLookup.getYearNumber(modeJava, date) * 100 - + CalendarLookup.getWeekNumber(modeJava, date); + int formatted = + CalendarLookup.getYearNumber(modeJava, date) * 100 + + CalendarLookup.getWeekNumber(modeJava, date); return new ExprIntegerValue(formatted); } @@ -2224,8 +2371,8 @@ private ExprValue exprYearweek(ExprValue date, ExprValue mode) { } /** - * Yearweek for date implementation for ExprValue. - * When mode is not specified default value mode 0 is used. + * Yearweek for date implementation for ExprValue. When mode is not specified default value mode 0 + * is used. * * @param date ExprValue of Date/Datetime/Time/Timestamp/String type. * @return ExprValue. @@ -2248,19 +2395,22 @@ private LocalDateTime formatNow(Clock clock) { /** * Prepare LocalDateTime value. Truncate fractional second part according to the argument. - * @param fsp argument is given to specify a fractional seconds precision from 0 to 6, - * the return value includes a fractional seconds part of that many digits. + * + * @param fsp argument is given to specify a fractional seconds precision from 0 to 6, the return + * value includes a fractional seconds part of that many digits. * @return LocalDateTime object. 
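Illustrative sketch (editorial, not part of the change): the fsp argument truncates the nanosecond field to the requested number of fractional digits, as in this hand-checked case for fsp = 3.

    int nano = new BigDecimal(123_456_789).setScale(3 - 9, RoundingMode.DOWN).intValue();
    // 123000000, i.e. millisecond precision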
*/ - private LocalDateTime formatNow(Clock clock, Integer fsp) { + private LocalDateTime formatNow(Clock clock, Integer fsp) { var res = LocalDateTime.now(clock); var defaultPrecision = 9; // There are 10^9 nanoseconds in one second if (fsp < 0 || fsp > 6) { // Check that the argument is in the allowed range [0, 6] throw new IllegalArgumentException( String.format("Invalid `fsp` value: %d, allowed 0 to 6", fsp)); } - var nano = new BigDecimal(res.getNano()) - .setScale(fsp - defaultPrecision, RoundingMode.DOWN).intValue(); + var nano = + new BigDecimal(res.getNano()) + .setScale(fsp - defaultPrecision, RoundingMode.DOWN) + .intValue(); return res.withNano(nano); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java b/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java index 3df8489b20..5170d49fc7 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/IntervalClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.datetime; import static org.opensearch.sql.data.model.ExprValueUtils.getIntegerValue; @@ -45,7 +44,8 @@ public void register(BuiltinFunctionRepository repository) { } private DefaultFunctionResolver interval() { - return define(BuiltinFunctionName.INTERVAL.getName(), + return define( + BuiltinFunctionName.INTERVAL.getName(), impl(nullMissingHandling(IntervalClause::interval), INTERVAL, INTEGER, STRING), impl(nullMissingHandling(IntervalClause::interval), INTERVAL, LONG, STRING)); } diff --git a/core/src/main/java/org/opensearch/sql/expression/env/Environment.java b/core/src/main/java/org/opensearch/sql/expression/env/Environment.java index d96d0c0a50..b1377f22ae 100644 --- a/core/src/main/java/org/opensearch/sql/expression/env/Environment.java +++ b/core/src/main/java/org/opensearch/sql/expression/env/Environment.java @@ -3,33 +3,30 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.env; /** * The definition of the environment. - * @param the type of expression + * + * @param the type of expression * @param the type of expression value */ public interface Environment { - /** - * resolve the value of expression from the environment. - */ + /** resolve the value of expression from the environment. */ V resolve(E var); /** * Extend the environment. * - * @param env environment - * @param expr expression. - * @param value expression value. - * @param the type of expression + * @param env environment + * @param expr expression. + * @param value expression value. + * @param the type of expression * @param the type of expression value * @return extended environment. */ - static Environment extendEnv( - Environment env, E expr, V value) { + static Environment extendEnv(Environment env, E expr, V value) { return var -> { if (var.equals(expr)) { return value; diff --git a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java index 728712f537..f50fa927b8 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionName.java @@ -12,15 +12,11 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Builtin Function Name. - */ +/** Builtin Function Name. 
*/ @Getter @RequiredArgsConstructor public enum BuiltinFunctionName { - /** - * Mathematical Functions. - */ + /** Mathematical Functions. */ ABS(FunctionName.of("abs")), CEIL(FunctionName.of("ceil")), CEILING(FunctionName.of("ceiling")), @@ -59,9 +55,7 @@ public enum BuiltinFunctionName { SIN(FunctionName.of("sin")), TAN(FunctionName.of("tan")), - /** - * Date and Time Functions. - */ + /** Date and Time Functions. */ ADDDATE(FunctionName.of("adddate")), ADDTIME(FunctionName.of("addtime")), CONVERT_TZ(FunctionName.of("convert_tz")), @@ -135,14 +129,10 @@ public enum BuiltinFunctionName { LOCALTIMESTAMP(FunctionName.of("localtimestamp")), SYSDATE(FunctionName.of("sysdate")), - /** - * Text Functions. - */ + /** Text Functions. */ TOSTRING(FunctionName.of("tostring")), - /** - * Arithmetic Operators. - */ + /** Arithmetic Operators. */ ADD(FunctionName.of("+")), ADDFUNCTION(FunctionName.of("add")), DIVIDE(FunctionName.of("/")), @@ -155,9 +145,7 @@ public enum BuiltinFunctionName { SUBTRACT(FunctionName.of("-")), SUBTRACTFUNCTION(FunctionName.of("subtract")), - /** - * Boolean Operators. - */ + /** Boolean Operators. */ AND(FunctionName.of("and")), OR(FunctionName.of("or")), XOR(FunctionName.of("xor")), @@ -171,9 +159,7 @@ public enum BuiltinFunctionName { LIKE(FunctionName.of("like")), NOT_LIKE(FunctionName.of("not like")), - /** - * Aggregation Function. - */ + /** Aggregation Function. */ AVG(FunctionName.of("avg")), SUM(FunctionName.of("sum")), COUNT(FunctionName.of("count")), @@ -192,9 +178,7 @@ public enum BuiltinFunctionName { // Not always an aggregation query NESTED(FunctionName.of("nested")), - /** - * Text Functions. - */ + /** Text Functions. */ ASCII(FunctionName.of("ascii")), CONCAT(FunctionName.of("concat")), CONCAT_WS(FunctionName.of("concat_ws")), @@ -215,9 +199,7 @@ public enum BuiltinFunctionName { TRIM(FunctionName.of("trim")), UPPER(FunctionName.of("upper")), - /** - * NULL Test. - */ + /** NULL Test. */ IS_NULL(FunctionName.of("is null")), IS_NOT_NULL(FunctionName.of("is not null")), IFNULL(FunctionName.of("ifnull")), @@ -231,9 +213,7 @@ public enum BuiltinFunctionName { INTERVAL(FunctionName.of("interval")), - /** - * Data Type Convert Function. - */ + /** Data Type Convert Function. */ CAST_TO_STRING(FunctionName.of("cast_to_string")), CAST_TO_BYTE(FunctionName.of("cast_to_byte")), CAST_TO_SHORT(FunctionName.of("cast_to_short")), @@ -248,9 +228,7 @@ public enum BuiltinFunctionName { CAST_TO_DATETIME(FunctionName.of("cast_to_datetime")), TYPEOF(FunctionName.of("typeof")), - /** - * Relevance Function. - */ + /** Relevance Function. */ MATCH(FunctionName.of("match")), SIMPLE_QUERY_STRING(FunctionName.of("simple_query_string")), MATCH_PHRASE(FunctionName.of("match_phrase")), @@ -264,9 +242,7 @@ public enum BuiltinFunctionName { SCOREQUERY(FunctionName.of("scorequery")), SCORE_QUERY(FunctionName.of("score_query")), - /** - * Legacy Relevance Function. - */ + /** Legacy Relevance Function. 
*/ QUERY(FunctionName.of("query")), MATCH_QUERY(FunctionName.of("match_query")), MATCHQUERY(FunctionName.of("matchquery")), diff --git a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java index 0eb11a9280..2e16d5f01f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/BuiltinFunctionRepository.java @@ -38,10 +38,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * Builtin Function Repository. - * Repository registers datasource specific functions under datasource namespace and - * universal functions under default namespace. - * + * Builtin Function Repository. Repository registers datasource specific functions under datasource + * namespace and universal functions under default namespace. */ public class BuiltinFunctionRepository { @@ -96,23 +94,20 @@ public void register(FunctionResolver resolver) { functionResolverMap.put(resolver.getFunctionName(), resolver); } - /** - * Compile FunctionExpression using core function resolver. - * - */ - public FunctionImplementation compile(FunctionProperties functionProperties, - FunctionName functionName, List expressions) { + /** Compile FunctionExpression using core function resolver. */ + public FunctionImplementation compile( + FunctionProperties functionProperties, + FunctionName functionName, + List expressions) { return compile(functionProperties, Collections.emptyList(), functionName, expressions); } - - /** - * Compile FunctionExpression within {@link StorageEngine} provided {@link FunctionResolver}. - */ - public FunctionImplementation compile(FunctionProperties functionProperties, - Collection dataSourceFunctionResolver, - FunctionName functionName, - List expressions) { + /** Compile FunctionExpression within {@link StorageEngine} provided {@link FunctionResolver}. */ + public FunctionImplementation compile( + FunctionProperties functionProperties, + Collection dataSourceFunctionResolver, + FunctionName functionName, + List expressions) { FunctionBuilder resolvedFunctionBuilder = resolve( dataSourceFunctionResolver, @@ -134,8 +129,9 @@ public FunctionImplementation compile(FunctionProperties functionProperties, public FunctionBuilder resolve( Collection dataSourceFunctionResolver, FunctionSignature functionSignature) { - Map dataSourceFunctionMap = dataSourceFunctionResolver.stream() - .collect(Collectors.toMap(FunctionResolver::getFunctionName, t -> t)); + Map dataSourceFunctionMap = + dataSourceFunctionResolver.stream() + .collect(Collectors.toMap(FunctionResolver::getFunctionName, t -> t)); // first, resolve in datasource provide function resolver. // second, resolve in builtin function resolver. @@ -171,14 +167,13 @@ private Optional resolve( } /** - * Wrap resolved function builder's arguments by cast function to cast input expression value - * to value of target type at runtime. For example, suppose unresolved signature is - * equal(BOOL,STRING) and its resolved function builder is F with signature equal(BOOL,BOOL). - * In this case, wrap F and return equal(BOOL, cast_to_bool(STRING)). + * Wrap resolved function builder's arguments by cast function to cast input expression value to + * value of target type at runtime. For example, suppose unresolved signature is + * equal(BOOL,STRING) and its resolved function builder is F with signature equal(BOOL,BOOL). 
In + * this case, wrap F and return equal(BOOL, cast_to_bool(STRING)). */ - private FunctionBuilder castArguments(List sourceTypes, - List targetTypes, - FunctionBuilder funcBuilder) { + private FunctionBuilder castArguments( + List sourceTypes, List targetTypes, FunctionBuilder funcBuilder) { return (fp, arguments) -> { List argsCasted = new ArrayList<>(); for (int i = 0; i < arguments.size(); i++) { @@ -208,10 +203,10 @@ private boolean isCastRequired(ExprType sourceType, ExprType targetType) { private Function cast(Expression arg, ExprType targetType) { FunctionName castFunctionName = getCastFunctionName(targetType); if (castFunctionName == null) { - throw new ExpressionEvaluationException(StringUtils.format( - "Type conversion to type %s is not supported", targetType)); + throw new ExpressionEvaluationException( + StringUtils.format("Type conversion to type %s is not supported", targetType)); } - return functionProperties -> (Expression) compile(functionProperties, - castFunctionName, List.of(arg)); + return functionProperties -> + (Expression) compile(functionProperties, castFunctionName, List.of(arg)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java b/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java index a28fa7e0ad..5d0f31594b 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/DefaultFunctionResolver.java @@ -18,52 +18,53 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; /** - * The Function Resolver hold the overload {@link FunctionBuilder} implementation. - * is composed by {@link FunctionName} which identified the function name - * and a map of {@link FunctionSignature} and {@link FunctionBuilder} - * to represent the overloaded implementation + * The Function Resolver hold the overload {@link FunctionBuilder} implementation. is composed by + * {@link FunctionName} which identified the function name and a map of {@link FunctionSignature} + * and {@link FunctionBuilder} to represent the overloaded implementation */ @Builder @RequiredArgsConstructor public class DefaultFunctionResolver implements FunctionResolver { - @Getter - private final FunctionName functionName; + @Getter private final FunctionName functionName; + @Singular("functionBundle") private final Map functionBundle; /** - * Resolve the {@link FunctionBuilder} by using input {@link FunctionSignature}. - * If the {@link FunctionBuilder} exactly match the input {@link FunctionSignature}, return it. - * If applying the widening rule, found the most match one, return it. - * If nothing found, throw {@link ExpressionEvaluationException} + * Resolve the {@link FunctionBuilder} by using input {@link FunctionSignature}. If the {@link + * FunctionBuilder} exactly match the input {@link FunctionSignature}, return it. If applying the + * widening rule, found the most match one, return it. 
If nothing found, throw {@link + * ExpressionEvaluationException} * * @return function signature and its builder */ @Override public Pair resolve(FunctionSignature unresolvedSignature) { - PriorityQueue> functionMatchQueue = new PriorityQueue<>( - Map.Entry.comparingByKey()); + PriorityQueue> functionMatchQueue = + new PriorityQueue<>(Map.Entry.comparingByKey()); for (FunctionSignature functionSignature : functionBundle.keySet()) { functionMatchQueue.add( - new AbstractMap.SimpleEntry<>(unresolvedSignature.match(functionSignature), - functionSignature)); + new AbstractMap.SimpleEntry<>( + unresolvedSignature.match(functionSignature), functionSignature)); } Map.Entry bestMatchEntry = functionMatchQueue.peek(); if (FunctionSignature.isVarArgFunction(bestMatchEntry.getValue().getParamTypeList()) - && (unresolvedSignature.getParamTypeList().isEmpty() + && (unresolvedSignature.getParamTypeList().isEmpty() || unresolvedSignature.getParamTypeList().size() > 9)) { throw new ExpressionEvaluationException( - String.format("%s function expected 1-9 arguments, but got %d", - functionName, unresolvedSignature.getParamTypeList().size())); + String.format( + "%s function expected 1-9 arguments, but got %d", + functionName, unresolvedSignature.getParamTypeList().size())); } if (FunctionSignature.NOT_MATCH.equals(bestMatchEntry.getKey()) - && !FunctionSignature.isVarArgFunction(bestMatchEntry.getValue().getParamTypeList())) { + && !FunctionSignature.isVarArgFunction(bestMatchEntry.getValue().getParamTypeList())) { throw new ExpressionEvaluationException( - String.format("%s function expected %s, but get %s", functionName, + String.format( + "%s function expected %s, but get %s", + functionName, formatFunctions(functionBundle.keySet()), - unresolvedSignature.formatTypes() - )); + unresolvedSignature.formatTypes())); } else { FunctionSignature resolvedSignature = bestMatchEntry.getValue(); return Pair.of(resolvedSignature, functionBundle.get(resolvedSignature)); @@ -71,7 +72,8 @@ public Pair resolve(FunctionSignature unreso } private String formatFunctions(Set functionSignatures) { - return functionSignatures.stream().map(FunctionSignature::formatTypes) + return functionSignatures.stream() + .map(FunctionSignature::formatTypes) .collect(Collectors.joining(",", "{", "}")); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java index b6e32a1d27..a529885c16 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionBuilder.java @@ -3,15 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.util.List; import org.opensearch.sql.expression.Expression; /** - * The definition of function which create {@link FunctionImplementation} - * from input {@link Expression} list. + * The definition of function which create {@link FunctionImplementation} from input {@link + * Expression} list. */ public interface FunctionBuilder { @@ -19,7 +18,7 @@ public interface FunctionBuilder { * Create {@link FunctionImplementation} from input {@link Expression} list. * * @param functionProperties context for function execution. - * @param arguments {@link Expression} list. + * @param arguments {@link Expression} list. 
* @return {@link FunctionImplementation} */ FunctionImplementation apply(FunctionProperties functionProperties, List arguments); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java index c57d96caea..8ebbfd3a3c 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.util.Arrays; @@ -21,21 +20,19 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.DefaultFunctionResolver.DefaultFunctionResolverBuilder; -/** - * Function Define Utility. - */ +/** Function Define Utility. */ @UtilityClass public class FunctionDSL { /** * Define overloaded function with implementation. * * @param functionName function name. - * @param functions a list of function implementation. + * @param functions a list of function implementation. * @return FunctionResolver. */ - public static DefaultFunctionResolver define(FunctionName functionName, - SerializableFunction>... functions) { + public static DefaultFunctionResolver define( + FunctionName functionName, + SerializableFunction>... functions) { return define(functionName, List.of(functions)); } @@ -43,11 +40,13 @@ public static DefaultFunctionResolver define(FunctionName functionName, * Define overloaded function with implementation. * * @param functionName function name. - * @param functions a list of function implementation. + * @param functions a list of function implementation. * @return FunctionResolver. */ - public static DefaultFunctionResolver define(FunctionName functionName, List< - SerializableFunction>> functions) { + public static DefaultFunctionResolver define( + FunctionName functionName, + List>> + functions) { DefaultFunctionResolverBuilder builder = DefaultFunctionResolver.builder(); builder.functionName(functionName); @@ -58,7 +57,6 @@ public static DefaultFunctionResolver define(FunctionName functionName, List< return builder.build(); } - /** * Implementation of no args function that uses FunctionProperties. * @@ -67,8 +65,8 @@ public static DefaultFunctionResolver define(FunctionName functionName, List< * @return no args function implementation. */ public static SerializableFunction> - implWithProperties(SerializableFunction function, - ExprType returnType) { + implWithProperties( + SerializableFunction function, ExprType returnType) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Collections.emptyList()); @@ -95,53 +93,54 @@ public String toString() { } /** - * Implementation of a function that takes one argument, returns a value, and - * requires FunctionProperties to complete. + * Implementation of a function that takes one argument, returns a value, and requires + * FunctionProperties to complete. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. * @param argsType argument type. * @return Unary Function Implementation. 
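Illustrative usage (editorial; the names are taken from the DateTimeFunction resolvers earlier in this diff, not invented here): a resolver is assembled by pairing define with impl/implWithProperties overload entries from this DSL.

    DefaultFunctionResolver utcDate =
        define(
            BuiltinFunctionName.UTC_DATE.getName(),
            implWithProperties(functionProperties -> exprUtcDate(functionProperties), DATE));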
*/ public static SerializableFunction> implWithProperties( - SerializableBiFunction function, - ExprType returnType, - ExprType argsType) { + SerializableBiFunction function, + ExprType returnType, + ExprType argsType) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Collections.singletonList(argsType)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue value = arguments.get(0).valueOf(valueEnv); - return function.apply(functionProperties, value); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s)", functionName, - arguments.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue value = arguments.get(0).valueOf(valueEnv); + return function.apply(functionProperties, value); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s)", + functionName, + arguments.stream().map(Object::toString).collect(Collectors.joining(", "))); + } + }; return Pair.of(functionSignature, functionBuilder); }; } /** - * Implementation of a function that takes two arguments, returns a value, and - * requires FunctionProperties to complete. + * Implementation of a function that takes two arguments, returns a value, and requires + * FunctionProperties to complete. * - * @param function {@link ExprValue} based Binary function. + * @param function {@link ExprValue} based Binary function. * @param returnType return type. * @param args1Type first argument type. * @param args2Type second argument type. 
@@ -149,45 +148,46 @@ public String toString() { */ public static SerializableFunction> implWithProperties( - SerializableTriFunction function, - ExprType returnType, - ExprType args1Type, - ExprType args2Type) { + SerializableTriFunction function, + ExprType returnType, + ExprType args1Type, + ExprType args2Type) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Arrays.asList(args1Type, args2Type)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - return function.apply(functionProperties, arg1, arg2); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s)", functionName, - arguments.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + return function.apply(functionProperties, arg1, arg2); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s)", + functionName, + arguments.stream().map(Object::toString).collect(Collectors.joining(", "))); + } + }; return Pair.of(functionSignature, functionBuilder); }; } /** - * Implementation of a function that takes three arguments, returns a value, and - * requires FunctionProperties to complete. + * Implementation of a function that takes three arguments, returns a value, and requires + * FunctionProperties to complete. * - * @param function {@link ExprValue} based Binary function. + * @param function {@link ExprValue} based Binary function. * @param returnType return type. * @param args1Type first argument type. * @param args2Type second argument type. 
@@ -196,43 +196,40 @@ public String toString() { */ public static SerializableFunction> implWithProperties( - SerializableQuadFunction< - FunctionProperties, - ExprValue, - ExprValue, - ExprValue, - ExprValue> function, - ExprType returnType, - ExprType args1Type, - ExprType args2Type, - ExprType args3Type) { + SerializableQuadFunction + function, + ExprType returnType, + ExprType args1Type, + ExprType args2Type, + ExprType args3Type) { return functionName -> { FunctionSignature functionSignature = new FunctionSignature(functionName, Arrays.asList(args1Type, args2Type, args3Type)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - ExprValue arg3 = arguments.get(2).valueOf(valueEnv); - return function.apply(functionProperties, arg1, arg2, arg3); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s)", functionName, - arguments.stream() - .map(Object::toString) - .collect(Collectors.joining(", "))); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + return function.apply(functionProperties, arg1, arg2, arg3); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s)", + functionName, + arguments.stream().map(Object::toString).collect(Collectors.joining(", "))); + } + }; return Pair.of(functionSignature, functionBuilder); }; } @@ -240,28 +237,25 @@ public String toString() { /** * No Arg Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. * @return Unary Function Implementation. */ public static SerializableFunction> impl( - SerializableNoArgFunction function, - ExprType returnType) { + SerializableNoArgFunction function, ExprType returnType) { return implWithProperties(fp -> function.get(), returnType); } /** * Unary Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. - * @param argsType argument type. + * @param argsType argument type. * @return Unary Function Implementation. */ public static SerializableFunction> impl( - SerializableFunction function, - ExprType returnType, - ExprType argsType) { + SerializableFunction function, ExprType returnType, ExprType argsType) { return implWithProperties((fp, arg) -> function.apply(arg), returnType, argsType); } @@ -269,10 +263,10 @@ public static SerializableFunction> impl( @@ -281,17 +275,17 @@ public static SerializableFunction - function.apply(arg1, arg2), returnType, args1Type, args2Type); + return implWithProperties( + (fp, arg1, arg2) -> function.apply(arg1, arg2), returnType, args1Type, args2Type); } /** * Triple Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. 
- * @param args1Type argument type. - * @param args2Type argument type. + * @param args1Type argument type. + * @param args2Type argument type. * @return Binary Function Implementation. */ public static SerializableFunction> impl( @@ -305,26 +299,31 @@ public static SerializableFunction new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - ExprValue arg3 = arguments.get(2).valueOf(valueEnv); - return function.apply(arg1, arg2, arg3); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s, %s, %s)", functionName, arguments.get(0).toString(), - arguments.get(1).toString(), arguments.get(2).toString()); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + return function.apply(arg1, arg2, arg3); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s, %s, %s)", + functionName, + arguments.get(0).toString(), + arguments.get(1).toString(), + arguments.get(2).toString()); + } + }; return Pair.of(functionSignature, functionBuilder); }; } @@ -332,11 +331,11 @@ public String toString() { /** * Quadruple Function Implementation. * - * @param function {@link ExprValue} based unary function. + * @param function {@link ExprValue} based unary function. * @param returnType return type. - * @param args1Type argument type. - * @param args2Type argument type. - * @param args3Type argument type. + * @param args1Type argument type. + * @param args2Type argument type. + * @param args3Type argument type. * @return Quadruple Function Implementation. 
*/ public static SerializableFunction> impl( @@ -349,42 +348,41 @@ public static SerializableFunction { FunctionSignature functionSignature = - new FunctionSignature(functionName, Arrays.asList( - args1Type, - args2Type, - args3Type, - args4Type)); + new FunctionSignature( + functionName, Arrays.asList(args1Type, args2Type, args3Type, args4Type)); FunctionBuilder functionBuilder = - (functionProperties, arguments) -> new FunctionExpression(functionName, arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - ExprValue arg1 = arguments.get(0).valueOf(valueEnv); - ExprValue arg2 = arguments.get(1).valueOf(valueEnv); - ExprValue arg3 = arguments.get(2).valueOf(valueEnv); - ExprValue arg4 = arguments.get(3).valueOf(valueEnv); - return function.apply(arg1, arg2, arg3, arg4); - } - - @Override - public ExprType type() { - return returnType; - } - - @Override - public String toString() { - return String.format("%s(%s, %s, %s, %s)", functionName, arguments.get(0).toString(), - arguments.get(1).toString(), - arguments.get(2).toString(), - arguments.get(3).toString()); - } - }; + (functionProperties, arguments) -> + new FunctionExpression(functionName, arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + ExprValue arg1 = arguments.get(0).valueOf(valueEnv); + ExprValue arg2 = arguments.get(1).valueOf(valueEnv); + ExprValue arg3 = arguments.get(2).valueOf(valueEnv); + ExprValue arg4 = arguments.get(3).valueOf(valueEnv); + return function.apply(arg1, arg2, arg3, arg4); + } + + @Override + public ExprType type() { + return returnType; + } + + @Override + public String toString() { + return String.format( + "%s(%s, %s, %s, %s)", + functionName, + arguments.get(0).toString(), + arguments.get(1).toString(), + arguments.get(2).toString(), + arguments.get(3).toString()); + } + }; return Pair.of(functionSignature, functionBuilder); }; } - /** - * Wrapper the unary ExprValue function with default NULL and MISSING handling. - */ + /** Wrapper the unary ExprValue function with default NULL and MISSING handling. */ public static SerializableFunction nullMissingHandling( SerializableFunction function) { return value -> { @@ -398,9 +396,7 @@ public static SerializableFunction nullMissingHandling( }; } - /** - * Wrapper the binary ExprValue function with default NULL and MISSING handling. - */ + /** Wrapper the binary ExprValue function with default NULL and MISSING handling. */ public static SerializableBiFunction nullMissingHandling( SerializableBiFunction function) { return (v1, v2) -> { @@ -414,9 +410,7 @@ public static SerializableBiFunction nullMissin }; } - /** - * Wrapper the triple ExprValue function with default NULL and MISSING handling. - */ + /** Wrapper the triple ExprValue function with default NULL and MISSING handling. */ public SerializableTriFunction nullMissingHandling( SerializableTriFunction function) { return (v1, v2, v3) -> { @@ -431,12 +425,12 @@ public SerializableTriFunction nullM } /** - * Wrapper the unary ExprValue function that is aware of FunctionProperties, - * with default NULL and MISSING handling. + * Wrapper the unary ExprValue function that is aware of FunctionProperties, with default NULL and + * MISSING handling. 
*/ public static SerializableBiFunction - nullMissingHandlingWithProperties( - SerializableBiFunction implementation) { + nullMissingHandlingWithProperties( + SerializableBiFunction implementation) { return (functionProperties, v1) -> { if (v1.isMissing()) { return ExprValueUtils.missingValue(); @@ -453,8 +447,9 @@ public SerializableTriFunction nullM * with default NULL and MISSING handling. */ public static SerializableTriFunction - nullMissingHandlingWithProperties( - SerializableTriFunction implementation) { + nullMissingHandlingWithProperties( + SerializableTriFunction + implementation) { return (functionProperties, v1, v2) -> { if (v1.isMissing() || v2.isMissing()) { return ExprValueUtils.missingValue(); @@ -471,18 +466,10 @@ public SerializableTriFunction nullM * with default NULL and MISSING handling. */ public static SerializableQuadFunction< - FunctionProperties, - ExprValue, - ExprValue, - ExprValue, - ExprValue> + FunctionProperties, ExprValue, ExprValue, ExprValue, ExprValue> nullMissingHandlingWithProperties( - SerializableQuadFunction< - FunctionProperties, - ExprValue, - ExprValue, - ExprValue, - ExprValue> implementation) { + SerializableQuadFunction + implementation) { return (functionProperties, v1, v2, v3) -> { if (v1.isMissing() || v2.isMissing() || v3.isMissing()) { return ExprValueUtils.missingValue(); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java index d829e01225..4fd265a890 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionImplementation.java @@ -3,24 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.util.List; import org.opensearch.sql.expression.Expression; -/** - * The definition of Function Implementation. - */ +/** The definition of Function Implementation. */ public interface FunctionImplementation { - /** - * Get Function Name. - */ + /** Get Function Name. */ FunctionName getFunctionName(); - /** - * Get Function Arguments. - */ + /** Get Function Arguments. */ List getArguments(); } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java index cb3d5fab92..ae2987a164 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionName.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; @@ -11,14 +10,11 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * The definition of Function Name. - */ +/** The definition of Function Name. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class FunctionName implements Serializable { - @Getter - private final String functionName; + @Getter private final String functionName; public static FunctionName of(String functionName) { return new FunctionName(functionName.toLowerCase()); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java index 4222748051..100c98bd38 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionProperties.java @@ -19,9 +19,7 @@ public class FunctionProperties implements Serializable { private final Instant nowInstant; private final ZoneId currentZoneId; - /** - * By default, use current time and current timezone. - */ + /** By default, use current time and current timezone. */ public FunctionProperties() { nowInstant = Instant.now(); currentZoneId = ZoneId.systemDefault(); @@ -29,6 +27,7 @@ public FunctionProperties() { /** * Method to access current system clock. + * * @return a ticking clock that tells the time. */ public Clock getSystemClock() { @@ -36,29 +35,28 @@ public Clock getSystemClock() { } /** - * Method to get time when query began execution. - * Clock class combines an instant Supplier and a time zone. - * @return a fixed clock that returns the time execution started at. + * Method to get time when query began execution. Clock class combines an instant Supplier and a + * time zone. * + * @return a fixed clock that returns the time execution started at. */ public Clock getQueryStartClock() { return Clock.fixed(nowInstant, currentZoneId); } - /** - * Use when compiling functions that do not rely on function properties. - */ - public static final FunctionProperties None = new FunctionProperties() { - @Override - public Clock getSystemClock() { - throw new UnexpectedCallException(); - } + /** Use when compiling functions that do not rely on function properties. */ + public static final FunctionProperties None = + new FunctionProperties() { + @Override + public Clock getSystemClock() { + throw new UnexpectedCallException(); + } - @Override - public Clock getQueryStartClock() { - throw new UnexpectedCallException(); - } - }; + @Override + public Clock getQueryStartClock() { + throw new UnexpectedCallException(); + } + }; class UnexpectedCallException extends RuntimeException { public UnexpectedCallException() { diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java index 1635b6f846..eaede1da7e 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionResolver.java @@ -8,8 +8,8 @@ import org.apache.commons.lang3.tuple.Pair; /** - * An interface for any class that can provide a {@ref FunctionBuilder} - * given a {@ref FunctionSignature}. + * An interface for any class that can provide a {@ref FunctionBuilder} given a {@ref + * FunctionSignature}. 
*/ public interface FunctionResolver { Pair resolve(FunctionSignature unresolvedSignature); diff --git a/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java b/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java index 0c59d71c25..e1246cde28 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/FunctionSignature.java @@ -15,9 +15,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.data.type.WideningTypeRule; -/** - * Function signature is composed by function name and arguments list. - */ +/** Function signature is composed by function name and arguments list. */ @Getter @RequiredArgsConstructor @EqualsAndHashCode @@ -31,9 +29,10 @@ public class FunctionSignature { /** * calculate the function signature match degree. * - * @return EXACTLY_MATCH: exactly match - * NOT_MATCH: not match - * By widening rule, the small number means better match + * @return
+ * EXACTLY_MATCH: exactly match
+ * NOT_MATCH: not match
+ * By widening rule, the small number means better match */ public int match(FunctionSignature functionSignature) { List functionTypeList = functionSignature.getParamTypeList(); @@ -60,18 +59,14 @@ public int match(FunctionSignature functionSignature) { return matchDegree; } - /** - * util function for formatted arguments list. - */ + /** util function for formatted arguments list. */ public String formatTypes() { return getParamTypeList().stream() .map(ExprType::typeName) .collect(Collectors.joining(",", "[", "]")); } - /** - * util function - returns true if function has variable arguments. - */ + /** util function - returns true if function has variable arguments. */ protected static boolean isVarArgFunction(List argTypes) { return argTypes.size() == 1 && argTypes.get(0) == ARRAY; } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java b/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java index c5fcb010f5..8d8928c16a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/OpenSearchFunctions.java @@ -22,9 +22,7 @@ @UtilityClass public class OpenSearchFunctions { - /** - * Add functions specific to OpenSearch to repository. - */ + /** Add functions specific to OpenSearch to repository. */ public void register(BuiltinFunctionRepository repository) { repository.register(match_bool_prefix()); repository.register(multi_match(BuiltinFunctionName.MULTI_MATCH)); @@ -101,19 +99,20 @@ private static FunctionResolver nested() { @Override public Pair resolve( FunctionSignature unresolvedSignature) { - return Pair.of(unresolvedSignature, + return Pair.of( + unresolvedSignature, (functionProperties, arguments) -> - new FunctionExpression(BuiltinFunctionName.NESTED.getName(), arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - return valueEnv.resolve(getArguments().get(0)); - } - - @Override - public ExprType type() { - return getArguments().get(0).type(); - } - }); + new FunctionExpression(BuiltinFunctionName.NESTED.getName(), arguments) { + @Override + public ExprValue valueOf(Environment valueEnv) { + return valueEnv.resolve(getArguments().get(0)); + } + + @Override + public ExprType type() { + return getArguments().get(0).type(); + } + }); } @Override @@ -123,9 +122,6 @@ public FunctionName getFunctionName() { }; } - - - private static FunctionResolver score(BuiltinFunctionName score) { FunctionName funcName = score.getName(); return new RelevanceFunctionResolver(funcName); @@ -135,12 +131,11 @@ public static class OpenSearchFunction extends FunctionExpression { private final FunctionName functionName; private final List arguments; - @Getter - @Setter - private boolean isScoreTracked; + @Getter @Setter private boolean isScoreTracked; /** * Required argument constructor. 
+ * * @param functionName name of the function * @param arguments a list of expressions */ @@ -153,9 +148,10 @@ public OpenSearchFunction(FunctionName functionName, List arguments) @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException(String.format( - "OpenSearch defined function [%s] is only supported in WHERE and HAVING clause.", - functionName)); + throw new UnsupportedOperationException( + String.format( + "OpenSearch defined function [%s] is only supported in WHERE and HAVING clause.", + functionName)); } @Override @@ -165,10 +161,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java b/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java index ef0ac9226c..ae882897d0 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/RelevanceFunctionResolver.java @@ -14,17 +14,18 @@ import org.opensearch.sql.exception.SemanticCheckException; @RequiredArgsConstructor -public class RelevanceFunctionResolver - implements FunctionResolver { +public class RelevanceFunctionResolver implements FunctionResolver { - @Getter - private final FunctionName functionName; + @Getter private final FunctionName functionName; @Override public Pair resolve(FunctionSignature unresolvedSignature) { if (!unresolvedSignature.getFunctionName().equals(functionName)) { - throw new SemanticCheckException(String.format("Expected '%s' but got '%s'", - functionName.getFunctionName(), unresolvedSignature.getFunctionName().getFunctionName())); + throw new SemanticCheckException( + String.format( + "Expected '%s' but got '%s'", + functionName.getFunctionName(), + unresolvedSignature.getFunctionName().getFunctionName())); } List paramTypes = unresolvedSignature.getParamTypeList(); // Check if all but the first parameter are of type STRING. @@ -36,13 +37,15 @@ public Pair resolve(FunctionSignature unreso } } - FunctionBuilder buildFunction = (functionProperties, args) - -> new OpenSearchFunctions.OpenSearchFunction(functionName, args); + FunctionBuilder buildFunction = + (functionProperties, args) -> + new OpenSearchFunctions.OpenSearchFunction(functionName, args); return Pair.of(unresolvedSignature, buildFunction); } - /** Returns a helpful error message when expected parameter type does not match the - * specified parameter type. + /** + * Returns a helpful error message when expected parameter type does not match the specified + * parameter type. * * @param i 0-based index of the parameter in a function signature. * @param paramType the type of the ith parameter at run-time. @@ -50,7 +53,8 @@ public Pair resolve(FunctionSignature unreso * @return A user-friendly error message that informs of the type difference. 
*/ private String getWrongParameterErrorMessage(int i, ExprType paramType, ExprType expectedType) { - return String.format("Expected type %s instead of %s for parameter #%d", + return String.format( + "Expected type %s instead of %s for parameter #%d", expectedType.typeName(), paramType.typeName(), i + 1); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java index 5b3aaf31f3..9f182e4c85 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableBiFunction.java @@ -3,14 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; import java.util.function.BiFunction; -/** - * Serializable BiFunction. - */ -public interface SerializableBiFunction extends BiFunction, Serializable { -} +/** Serializable BiFunction. */ +public interface SerializableBiFunction extends BiFunction, Serializable {} diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java index 467c034c39..fb3e2f2cfb 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableFunction.java @@ -3,11 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; import java.util.function.Function; -public interface SerializableFunction extends Function, Serializable { -} +public interface SerializableFunction extends Function, Serializable {} diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java index e68d6084b4..6eaf699bf9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableNoArgFunction.java @@ -3,14 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; import java.util.function.Supplier; -/** - * Serializable no argument function. - */ -public interface SerializableNoArgFunction extends Supplier, Serializable { -} +/** Serializable no argument function. 
*/ +public interface SerializableNoArgFunction extends Supplier, Serializable {} diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java index 056a17d5b3..7285d9a32f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableQuadFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; diff --git a/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java b/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java index 911012fcdb..e980b1c82a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/SerializableTriFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.function; import java.io.Serializable; diff --git a/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java b/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java index f35ffe4898..b8b90bf5e3 100644 --- a/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java +++ b/core/src/main/java/org/opensearch/sql/expression/function/TableFunctionImplementation.java @@ -9,11 +9,8 @@ import org.opensearch.sql.storage.Table; -/** - * Interface for table function which returns Table when executed. - */ +/** Interface for table function which returns Table when executed. */ public interface TableFunctionImplementation extends FunctionImplementation { Table applyArguments(); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java index e25c5cda20..82b91e1d34 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/ArithmeticFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.arthmetic; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; @@ -30,11 +29,11 @@ import org.opensearch.sql.expression.function.FunctionName; /** - * The definition of arithmetic function - * add, Accepts two numbers and produces a number. - * subtract, Accepts two numbers and produces a number. - * multiply, Accepts two numbers and produces a number. - * divide, Accepts two numbers and produces a number. + * The definition of arithmetic function
+ * add, Accepts two numbers and produces a number.
+ * subtract, Accepts two numbers and produces a number.
+ * multiply, Accepts two numbers and produces a number.
+ * divide, Accepts two numbers and produces a number.
* module, Accepts two numbers and produces a number. */ @UtilityClass @@ -59,33 +58,49 @@ public static void register(BuiltinFunctionRepository repository) { } /** - * Definition of add(x, y) function. - * Returns the number x plus number y - * The supported signature of add function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of add(x, y) function.
+ * Returns the number x plus number y
+ * The supported signature of add function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver addBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> new ExprByteValue(v1.byteValue() + v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> new ExprShortValue(v1.shortValue() + v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprIntegerValue(Math.addExact(v1.integerValue(), v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> new ExprLongValue(Math.addExact(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> new ExprFloatValue(v1.floatValue() + v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(v1.doubleValue() + v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + return define( + functionName, + impl( + nullMissingHandling((v1, v2) -> new ExprByteValue(v1.byteValue() + v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling((v1, v2) -> new ExprShortValue(v1.shortValue() + v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + new ExprIntegerValue(Math.addExact(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> new ExprLongValue(Math.addExact(v1.longValue(), v2.longValue()))), + LONG, + LONG, + LONG), + impl( + nullMissingHandling((v1, v2) -> new ExprFloatValue(v1.floatValue() + v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(v1.doubleValue() + v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver add() { @@ -97,39 +112,69 @@ private static DefaultFunctionResolver addFunction() { } /** - * Definition of divide(x, y) function. - * Returns the number x divided by number y - * The supported signature of divide function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of divide(x, y) function.
+ * Returns the number x divided by number y
+ * The supported signature of divide function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver divideBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> v2.byteValue() == 0 ? ExprNullValue.of() : - new ExprByteValue(v1.byteValue() / v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprShortValue(v1.shortValue() / v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> v2.integerValue() == 0 ? ExprNullValue.of() : - new ExprIntegerValue(v1.integerValue() / v2.integerValue())), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> v2.longValue() == 0 ? ExprNullValue.of() : - new ExprLongValue(v1.longValue() / v2.longValue())), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> v2.floatValue() == 0 ? ExprNullValue.of() : - new ExprFloatValue(v1.floatValue() / v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> v2.doubleValue() == 0 ? ExprNullValue.of() : - new ExprDoubleValue(v1.doubleValue() / v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + return define( + functionName, + impl( + nullMissingHandling( + (v1, v2) -> + v2.byteValue() == 0 + ? ExprNullValue.of() + : new ExprByteValue(v1.byteValue() / v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprShortValue(v1.shortValue() / v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.integerValue() == 0 + ? ExprNullValue.of() + : new ExprIntegerValue(v1.integerValue() / v2.integerValue())), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> + v2.longValue() == 0 + ? ExprNullValue.of() + : new ExprLongValue(v1.longValue() / v2.longValue())), + LONG, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v2.floatValue() == 0 + ? ExprNullValue.of() + : new ExprFloatValue(v1.floatValue() / v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.doubleValue() == 0 + ? ExprNullValue.of() + : new ExprDoubleValue(v1.doubleValue() / v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver divide() { @@ -141,39 +186,69 @@ private static DefaultFunctionResolver divideFunction() { } /** - * Definition of modulus(x, y) function. - * Returns the number x modulo by number y - * The supported signature of modulo function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of modulus(x, y) function.
+ * Returns the number x modulo by number y
+ * The supported signature of modulo function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver modulusBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> v2.byteValue() == 0 ? ExprNullValue.of() : - new ExprByteValue(v1.byteValue() % v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprShortValue(v1.shortValue() % v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> v2.integerValue() == 0 ? ExprNullValue.of() : - new ExprIntegerValue(v1.integerValue() % v2.integerValue())), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> v2.longValue() == 0 ? ExprNullValue.of() : - new ExprLongValue(v1.longValue() % v2.longValue())), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> v2.floatValue() == 0 ? ExprNullValue.of() : - new ExprFloatValue(v1.floatValue() % v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> v2.doubleValue() == 0 ? ExprNullValue.of() : - new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + return define( + functionName, + impl( + nullMissingHandling( + (v1, v2) -> + v2.byteValue() == 0 + ? ExprNullValue.of() + : new ExprByteValue(v1.byteValue() % v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprShortValue(v1.shortValue() % v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.integerValue() == 0 + ? ExprNullValue.of() + : new ExprIntegerValue(v1.integerValue() % v2.integerValue())), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> + v2.longValue() == 0 + ? ExprNullValue.of() + : new ExprLongValue(v1.longValue() % v2.longValue())), + LONG, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v2.floatValue() == 0 + ? ExprNullValue.of() + : new ExprFloatValue(v1.floatValue() % v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.doubleValue() == 0 + ? ExprNullValue.of() + : new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver mod() { @@ -189,34 +264,49 @@ private static DefaultFunctionResolver modulusFunction() { } /** - * Definition of multiply(x, y) function. - * Returns the number x multiplied by number y - * The supported signature of multiply function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) - * -> wider type between types of x and y + * Definition of multiply(x, y) function.
+ * Returns the number x multiplied by number y
+ * The supported signature of multiply function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
+ * -> wider type between types of x and y */ private static DefaultFunctionResolver multiplyBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> new ExprByteValue(v1.byteValue() * v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> new ExprShortValue(v1.shortValue() * v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprIntegerValue(Math.multiplyExact(v1.integerValue(), - v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( + return define( + functionName, + impl( + nullMissingHandling((v1, v2) -> new ExprByteValue(v1.byteValue() * v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling((v1, v2) -> new ExprShortValue(v1.shortValue() * v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + new ExprIntegerValue(Math.multiplyExact(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( (v1, v2) -> new ExprLongValue(Math.multiplyExact(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> new ExprFloatValue(v1.floatValue() * v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( + LONG, + LONG, + LONG), + impl( + nullMissingHandling((v1, v2) -> new ExprFloatValue(v1.floatValue() * v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( (v1, v2) -> new ExprDoubleValue(v1.doubleValue() * v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver multiply() { @@ -228,34 +318,49 @@ private static DefaultFunctionResolver multiplyFunction() { } /** - * Definition of subtract(x, y) function. - * Returns the number x minus number y - * The supported signature of subtract function is - * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE) + * Definition of subtract(x, y) function.
+ * Returns the number x minus number y
+ * The supported signature of subtract function is
+ * (x: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE, y: BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver subtractBase(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling( - (v1, v2) -> new ExprByteValue(v1.byteValue() - v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling( - (v1, v2) -> new ExprShortValue(v1.shortValue() - v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprIntegerValue(Math.subtractExact(v1.integerValue(), - v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling( + return define( + functionName, + impl( + nullMissingHandling((v1, v2) -> new ExprByteValue(v1.byteValue() - v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling((v1, v2) -> new ExprShortValue(v1.shortValue() - v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + new ExprIntegerValue(Math.subtractExact(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( (v1, v2) -> new ExprLongValue(Math.subtractExact(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> new ExprFloatValue(v1.floatValue() - v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling( + LONG, + LONG, + LONG), + impl( + nullMissingHandling((v1, v2) -> new ExprFloatValue(v1.floatValue() - v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( (v1, v2) -> new ExprDoubleValue(v1.doubleValue() - v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); + DOUBLE, + DOUBLE, + DOUBLE)); } private static DefaultFunctionResolver subtract() { diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java index 810d292ca2..22f4b76573 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/arthmetic/MathematicalFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.arthmetic; import static org.opensearch.sql.data.type.ExprCoreType.BYTE; @@ -102,126 +101,155 @@ public static void register(BuiltinFunctionRepository repository) { * @return DefaultFunctionResolver for math functions. */ private static DefaultFunctionResolver baseMathFunction( - FunctionName functionName, SerializableFunction formula, ExprCoreType returnType) { - return define(functionName, ExprCoreType.numberTypes().stream().map(type -> - impl(nullMissingHandling(formula), returnType, type)).collect(Collectors.toList())); + FunctionName functionName, + SerializableFunction formula, + ExprCoreType returnType) { + return define( + functionName, + ExprCoreType.numberTypes().stream() + .map(type -> impl(nullMissingHandling(formula), returnType, type)) + .collect(Collectors.toList())); } /** - * Definition of abs() function. The supported signature of abs() function are INT -> INT LONG -> - * LONG FLOAT -> FLOAT DOUBLE -> DOUBLE + * Definition of abs() function.<\b>
+ * The supported signature of abs() function are
+ * INT/LONG/FLOAT/DOUBLE -> INT/LONG/FLOAT/DOUBLE */ private static DefaultFunctionResolver abs() { - return define(BuiltinFunctionName.ABS.getName(), - impl(nullMissingHandling(v -> new ExprByteValue(Math.abs(v.byteValue()))), - BYTE, BYTE), - impl(nullMissingHandling(v -> new ExprShortValue(Math.abs(v.shortValue()))), - SHORT, SHORT), - impl(nullMissingHandling(v -> new ExprIntegerValue(Math.abs(v.integerValue()))), - INTEGER, INTEGER), - impl(nullMissingHandling(v -> new ExprLongValue(Math.abs(v.longValue()))), - LONG, LONG), - impl(nullMissingHandling(v -> new ExprFloatValue(Math.abs(v.floatValue()))), - FLOAT, FLOAT), - impl(nullMissingHandling(v -> new ExprDoubleValue(Math.abs(v.doubleValue()))), - DOUBLE, DOUBLE) - ); - } - - /** - * Definition of ceil(x)/ceiling(x) function. Calculate the next highest integer that x rounds up - * to The supported signature of ceil/ceiling function is DOUBLE -> INTEGER + return define( + BuiltinFunctionName.ABS.getName(), + impl(nullMissingHandling(v -> new ExprByteValue(Math.abs(v.byteValue()))), BYTE, BYTE), + impl(nullMissingHandling(v -> new ExprShortValue(Math.abs(v.shortValue()))), SHORT, SHORT), + impl( + nullMissingHandling(v -> new ExprIntegerValue(Math.abs(v.integerValue()))), + INTEGER, + INTEGER), + impl(nullMissingHandling(v -> new ExprLongValue(Math.abs(v.longValue()))), LONG, LONG), + impl(nullMissingHandling(v -> new ExprFloatValue(Math.abs(v.floatValue()))), FLOAT, FLOAT), + impl( + nullMissingHandling(v -> new ExprDoubleValue(Math.abs(v.doubleValue()))), + DOUBLE, + DOUBLE)); + } + + /** + * Definition of ceil(x)/ceiling(x) function.<\b>
+ * Calculate the next highest integer that x rounds up to The supported signature of ceil/ceiling + * function is DOUBLE -> INTEGER */ private static DefaultFunctionResolver ceil() { - return define(BuiltinFunctionName.CEIL.getName(), - impl(nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), - LONG, DOUBLE) - ); + return define( + BuiltinFunctionName.CEIL.getName(), + impl( + nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), LONG, DOUBLE)); } private static DefaultFunctionResolver ceiling() { - return define(BuiltinFunctionName.CEILING.getName(), - impl(nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), - LONG, DOUBLE) - ); + return define( + BuiltinFunctionName.CEILING.getName(), + impl( + nullMissingHandling(v -> new ExprLongValue(Math.ceil(v.doubleValue()))), LONG, DOUBLE)); } /** - * Definition of conv(x, a, b) function. - * Convert number x from base a to base b - * The supported signature of floor function is - * (STRING, INTEGER, INTEGER) -> STRING + * Definition of conv(x, a, b) function.<\b>
+ * Convert number x from base a to base b
+ * The supported signature of floor function is
+ * (STRING, INTEGER, INTEGER) -> STRING
* (INTEGER, INTEGER, INTEGER) -> STRING */ private static DefaultFunctionResolver conv() { - return define(BuiltinFunctionName.CONV.getName(), - impl(nullMissingHandling((x, a, b) -> new ExprStringValue( - Integer.toString(Integer.parseInt(x.stringValue(), a.integerValue()), - b.integerValue()))), - STRING, STRING, INTEGER, INTEGER), - impl(nullMissingHandling((x, a, b) -> new ExprStringValue( - Integer.toString(Integer.parseInt(x.integerValue().toString(), a.integerValue()), - b.integerValue()))), - STRING, INTEGER, INTEGER, INTEGER) - ); - } - - /** - * Definition of crc32(x) function. - * Calculate a cyclic redundancy check value and returns a 32-bit unsigned value - * The supported signature of crc32 function is + return define( + BuiltinFunctionName.CONV.getName(), + impl( + nullMissingHandling( + (x, a, b) -> + new ExprStringValue( + Integer.toString( + Integer.parseInt(x.stringValue(), a.integerValue()), + b.integerValue()))), + STRING, + STRING, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (x, a, b) -> + new ExprStringValue( + Integer.toString( + Integer.parseInt(x.integerValue().toString(), a.integerValue()), + b.integerValue()))), + STRING, + INTEGER, + INTEGER, + INTEGER)); + } + + /** + * Definition of crc32(x) function.<\b>
+ * Calculate a cyclic redundancy check value and returns a 32-bit unsigned value
+ * The supported signature of crc32 function is
* STRING -> LONG */ private static DefaultFunctionResolver crc32() { - return define(BuiltinFunctionName.CRC32.getName(), - impl(nullMissingHandling(v -> { - CRC32 crc = new CRC32(); - crc.update(v.stringValue().getBytes()); - return new ExprLongValue(crc.getValue()); - }), - LONG, STRING) - ); + return define( + BuiltinFunctionName.CRC32.getName(), + impl( + nullMissingHandling( + v -> { + CRC32 crc = new CRC32(); + crc.update(v.stringValue().getBytes()); + return new ExprLongValue(crc.getValue()); + }), + LONG, + STRING)); } /** - * Definition of e() function. - * Get the Euler's number. - * () -> DOUBLE + * Definition of e() function.
+ * Get the Euler's number. () -> DOUBLE */ private static DefaultFunctionResolver euler() { - return define(BuiltinFunctionName.E.getName(), - impl(() -> new ExprDoubleValue(Math.E), DOUBLE) - ); + return define(BuiltinFunctionName.E.getName(), impl(() -> new ExprDoubleValue(Math.E), DOUBLE)); } /** - * Definition of exp(x) function. Calculate exponent function e to the x - * The supported signature of exp function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + * Definition of exp(x) function.
+ * Calculate exponent function e to the x The supported signature of exp function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver exp() { - return baseMathFunction(BuiltinFunctionName.EXP.getName(), - v -> new ExprDoubleValue(Math.exp(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.EXP.getName(), + v -> new ExprDoubleValue(Math.exp(v.doubleValue())), + DOUBLE); } /** - * Definition of expm1(x) function. Calculate exponent function e to the x, minus 1 - * The supported signature of exp function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + * Definition of expm1(x) function.
+ * Calculate exponent function e to the x, minus 1 The supported signature of exp function is + * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver expm1() { - return baseMathFunction(BuiltinFunctionName.EXPM1.getName(), - v -> new ExprDoubleValue(Math.expm1(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.EXPM1.getName(), + v -> new ExprDoubleValue(Math.expm1(v.doubleValue())), + DOUBLE); } /** - * Definition of floor(x) function. Calculate the next nearest whole integer that x rounds down to - * The supported signature of floor function is DOUBLE -> INTEGER + * Definition of floor(x) function.
+ * Calculate the next nearest whole integer that x rounds down to The supported signature of floor + * function is DOUBLE -> INTEGER */ private static DefaultFunctionResolver floor() { - return define(BuiltinFunctionName.FLOOR.getName(), - impl(nullMissingHandling(v -> new ExprLongValue(Math.floor(v.doubleValue()))), - LONG, DOUBLE) - ); + return define( + BuiltinFunctionName.FLOOR.getName(), + impl( + nullMissingHandling(v -> new ExprLongValue(Math.floor(v.doubleValue()))), + LONG, + DOUBLE)); } /** @@ -229,108 +257,171 @@ private static DefaultFunctionResolver floor() { * ln function is INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver ln() { - return baseMathFunction(BuiltinFunctionName.LN.getName(), - v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.LN.getName(), + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log(v.doubleValue())), + DOUBLE); } /** - * Definition of log(b, x) function. Calculate the logarithm of x using b as the base The - * supported signature of log function is (b: INTEGER/LONG/FLOAT/DOUBLE, x: - * INTEGER/LONG/FLOAT/DOUBLE]) -> DOUBLE + * Definition of log(b, x) function.
+ * Calculate the logarithm of x using b as the base The supported signature of log function is
+ * (b: INTEGER/LONG/FLOAT/DOUBLE, x: INTEGER/LONG/FLOAT/DOUBLE]) -> DOUBLE */ private static DefaultFunctionResolver log() { - ImmutableList.Builder>> builder = new ImmutableList.Builder<>(); + ImmutableList.Builder< + SerializableFunction>> + builder = new ImmutableList.Builder<>(); // build unary log(x), SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE for (ExprType type : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling(v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log(v.doubleValue()))), - DOUBLE, type)); + builder.add( + impl( + nullMissingHandling( + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log(v.doubleValue()))), + DOUBLE, + type)); } // build binary function log(b, x) for (ExprType baseType : ExprCoreType.numberTypes()) { for (ExprType numberType : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling((b, x) -> b.doubleValue() <= 0 || x.doubleValue() <= 0 - ? ExprNullValue.of() : new ExprDoubleValue( - Math.log(x.doubleValue()) / Math.log(b.doubleValue()))), - DOUBLE, baseType, numberType)); + builder.add( + impl( + nullMissingHandling( + (b, x) -> + b.doubleValue() <= 0 || x.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue( + Math.log(x.doubleValue()) / Math.log(b.doubleValue()))), + DOUBLE, + baseType, + numberType)); } } return define(BuiltinFunctionName.LOG.getName(), builder.build()); } - /** - * Definition of log10(x) function. Calculate base-10 logarithm of x The supported signature of + * Definition of log10(x) function.
+ * Calculate base-10 logarithm of x The supported signature of
* log function is SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver log10() { - return baseMathFunction(BuiltinFunctionName.LOG10.getName(), - v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log10(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.LOG10.getName(), + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log10(v.doubleValue())), + DOUBLE); } /** - * Definition of log2(x) function. Calculate base-2 logarithm of x The supported signature of log + * Definition of log2(x) function.
+ * Calculate base-2 logarithm of x The supported signature of log
* function is SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver log2() { - return baseMathFunction(BuiltinFunctionName.LOG2.getName(), - v -> v.doubleValue() <= 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.log(v.doubleValue()) / Math.log(2)), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.LOG2.getName(), + v -> + v.doubleValue() <= 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.log(v.doubleValue()) / Math.log(2)), + DOUBLE); } /** - * Definition of mod(x, y) function. - * Calculate the remainder of x divided by y - * The supported signature of mod function is - * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) + * Definition of mod(x, y) function.
+ * Calculate the remainder of x divided by y
+ * The supported signature of mod function is
+ * (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE)
* -> wider type between types of x and y */ private static DefaultFunctionResolver mod() { - return define(BuiltinFunctionName.MOD.getName(), - impl(nullMissingHandling((v1, v2) -> v2.byteValue() == 0 ? ExprNullValue.of() : - new ExprByteValue(v1.byteValue() % v2.byteValue())), - BYTE, BYTE, BYTE), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprShortValue(v1.shortValue() % v2.shortValue())), - SHORT, SHORT, SHORT), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprIntegerValue(Math.floorMod(v1.integerValue(), v2.integerValue()))), - INTEGER, INTEGER, INTEGER), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprLongValue(Math.floorMod(v1.longValue(), v2.longValue()))), - LONG, LONG, LONG), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprFloatValue(v1.floatValue() % v2.floatValue())), - FLOAT, FLOAT, FLOAT), - impl(nullMissingHandling((v1, v2) -> v2.shortValue() == 0 ? ExprNullValue.of() : - new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), - DOUBLE, DOUBLE, DOUBLE) - ); - } - - /** - * Definition of pi() function. - * Get the value of pi. + return define( + BuiltinFunctionName.MOD.getName(), + impl( + nullMissingHandling( + (v1, v2) -> + v2.byteValue() == 0 + ? ExprNullValue.of() + : new ExprByteValue(v1.byteValue() % v2.byteValue())), + BYTE, + BYTE, + BYTE), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprShortValue(v1.shortValue() % v2.shortValue())), + SHORT, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprIntegerValue( + Math.floorMod(v1.integerValue(), v2.integerValue()))), + INTEGER, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprLongValue(Math.floorMod(v1.longValue(), v2.longValue()))), + LONG, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprFloatValue(v1.floatValue() % v2.floatValue())), + FLOAT, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v2.shortValue() == 0 + ? ExprNullValue.of() + : new ExprDoubleValue(v1.doubleValue() % v2.doubleValue())), + DOUBLE, + DOUBLE, + DOUBLE)); + } + + /** + * Definition of pi() function.
+ * Get the value of pi.
* () -> DOUBLE */ private static DefaultFunctionResolver pi() { - return define(BuiltinFunctionName.PI.getName(), - impl(() -> new ExprDoubleValue(Math.PI), DOUBLE) - ); + return define( + BuiltinFunctionName.PI.getName(), impl(() -> new ExprDoubleValue(Math.PI), DOUBLE)); } /** - * Definition of pow(x, y)/power(x, y) function. - * Calculate the value of x raised to the power of y - * The supported signature of pow/power function is - * (INTEGER, INTEGER) -> DOUBLE - * (LONG, LONG) -> DOUBLE - * (FLOAT, FLOAT) -> DOUBLE + * Definition of pow(x, y)/power(x, y) function.
+ * Calculate the value of x raised to the power of y
+ * The supported signature of pow/power function is
+ * (INTEGER, INTEGER) -> DOUBLE
+ * (LONG, LONG) -> DOUBLE
+ * (FLOAT, FLOAT) -> DOUBLE
* (DOUBLE, DOUBLE) -> DOUBLE */ private static DefaultFunctionResolver pow() { @@ -341,336 +432,485 @@ private static DefaultFunctionResolver power() { return define(BuiltinFunctionName.POWER.getName(), powerFunctionImpl()); } - private List>> powerFunctionImpl() { + private List>> + powerFunctionImpl() { return Arrays.asList( - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(Math.pow(v1.shortValue(), v2.shortValue()))), - DOUBLE, SHORT, SHORT), - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(Math.pow(v1.integerValue(), v2.integerValue()))), - DOUBLE, INTEGER, INTEGER), - impl(nullMissingHandling( - (v1, v2) -> new ExprDoubleValue(Math.pow(v1.longValue(), v2.longValue()))), - DOUBLE, LONG, LONG), - impl(nullMissingHandling( - (v1, v2) -> v1.floatValue() <= 0 && v2.floatValue() != Math.floor(v2.floatValue()) - ? ExprNullValue.of() : - new ExprDoubleValue(Math.pow(v1.floatValue(), v2.floatValue()))), - DOUBLE, FLOAT, FLOAT), - impl(nullMissingHandling( - (v1, v2) -> v1.doubleValue() <= 0 && v2.doubleValue() != Math.floor(v2.doubleValue()) - ? ExprNullValue.of() : - new ExprDoubleValue(Math.pow(v1.doubleValue(), v2.doubleValue()))), - DOUBLE, DOUBLE, DOUBLE)); - } - - /** - * Definition of rand() and rand(N) function. - * rand() returns a random floating-point value in the range 0 <= value < 1.0 - * If integer N is specified, the seed is initialized prior to execution. + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(Math.pow(v1.shortValue(), v2.shortValue()))), + DOUBLE, + SHORT, + SHORT), + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(Math.pow(v1.integerValue(), v2.integerValue()))), + DOUBLE, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (v1, v2) -> new ExprDoubleValue(Math.pow(v1.longValue(), v2.longValue()))), + DOUBLE, + LONG, + LONG), + impl( + nullMissingHandling( + (v1, v2) -> + v1.floatValue() <= 0 && v2.floatValue() != Math.floor(v2.floatValue()) + ? ExprNullValue.of() + : new ExprDoubleValue(Math.pow(v1.floatValue(), v2.floatValue()))), + DOUBLE, + FLOAT, + FLOAT), + impl( + nullMissingHandling( + (v1, v2) -> + v1.doubleValue() <= 0 && v2.doubleValue() != Math.floor(v2.doubleValue()) + ? ExprNullValue.of() + : new ExprDoubleValue(Math.pow(v1.doubleValue(), v2.doubleValue()))), + DOUBLE, + DOUBLE, + DOUBLE)); + } + + /** + * Definition of rand() and rand(N) function.
+ * rand() returns a random floating-point value in the range 0 <= value < 1.0
+ * If integer N is specified, the seed is initialized prior to execution.
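The seeding behaviour described here follows from the implementation shown below, which builds a fresh java.util.Random from the argument on every call. A small sketch of why a fixed argument is repeatable:

    import java.util.Random;

    public class SeededRandSketch {
      public static void main(String[] args) {
        int n = 42; // hypothetical seed argument
        float first = new Random(n).nextFloat();
        float second = new Random(n).nextFloat();
        // Both calls seed a new generator with the same value, so the first float matches.
        System.out.println(first == second); // true
      }
    }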
* One implication of this behavior is with identical argument N,rand(N) returns the same value - * each time, and thus produces a repeatable sequence of column values. - * The supported signature of rand function is - * ([INTEGER]) -> FLOAT + *
+ * each time, and thus produces a repeatable sequence of column values. The supported signature of + *
+ * rand function is ([INTEGER]) -> FLOAT */ private static DefaultFunctionResolver rand() { - return define(BuiltinFunctionName.RAND.getName(), + return define( + BuiltinFunctionName.RAND.getName(), impl(() -> new ExprFloatValue(new Random().nextFloat()), FLOAT), - impl(nullMissingHandling( - v -> new ExprFloatValue(new Random(v.integerValue()).nextFloat())), FLOAT, INTEGER) - ); + impl( + nullMissingHandling(v -> new ExprFloatValue(new Random(v.integerValue()).nextFloat())), + FLOAT, + INTEGER)); } /** - * Definition of rint(x) function. - * Returns the closest whole integer value to x - * The supported signature is + * Definition of rint(x) function.
+ * Returns the closest whole integer value to x
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver rint() { - return baseMathFunction(BuiltinFunctionName.RINT.getName(), - v -> new ExprDoubleValue(Math.rint(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.RINT.getName(), + v -> new ExprDoubleValue(Math.rint(v.doubleValue())), + DOUBLE); } /** - * Definition of round(x)/round(x, d) function. - * Rounds the argument x to d decimal places, d defaults to 0 if not specified. - * The supported signature of round function is - * (x: INTEGER [, y: INTEGER]) -> INTEGER - * (x: LONG [, y: INTEGER]) -> LONG - * (x: FLOAT [, y: INTEGER]) -> FLOAT + * Definition of round(x)/round(x, d) function.
+ * Rounds the argument x to d decimal places, d defaults to 0 if not specified.
+ * The supported signature of round function is
+ * (x: INTEGER [, y: INTEGER]) -> INTEGER
+ * (x: LONG [, y: INTEGER]) -> LONG
+ * (x: FLOAT [, y: INTEGER]) -> FLOAT
* (x: DOUBLE [, y: INTEGER]) -> DOUBLE */ private static DefaultFunctionResolver round() { - return define(BuiltinFunctionName.ROUND.getName(), + return define( + BuiltinFunctionName.ROUND.getName(), // rand(x) - impl(nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.integerValue()))), - LONG, INTEGER), - impl(nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.longValue()))), - LONG, LONG), - impl(nullMissingHandling(v -> new ExprDoubleValue((double) Math.round(v.floatValue()))), - DOUBLE, FLOAT), - impl(nullMissingHandling(v -> new ExprDoubleValue(new BigDecimal(v.doubleValue()) - .setScale(0, RoundingMode.HALF_UP).doubleValue())), - DOUBLE, DOUBLE), + impl( + nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.integerValue()))), + LONG, + INTEGER), + impl( + nullMissingHandling(v -> new ExprLongValue((long) Math.round(v.longValue()))), + LONG, + LONG), + impl( + nullMissingHandling(v -> new ExprDoubleValue((double) Math.round(v.floatValue()))), + DOUBLE, + FLOAT), + impl( + nullMissingHandling( + v -> + new ExprDoubleValue( + new BigDecimal(v.doubleValue()) + .setScale(0, RoundingMode.HALF_UP) + .doubleValue())), + DOUBLE, + DOUBLE), // rand(x, d) - impl(nullMissingHandling((x, d) -> new ExprLongValue(new BigDecimal(x.integerValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).longValue())), - LONG, INTEGER, INTEGER), - impl(nullMissingHandling((x, d) -> new ExprLongValue(new BigDecimal(x.longValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).longValue())), - LONG, LONG, INTEGER), - impl(nullMissingHandling((x, d) -> new ExprDoubleValue(new BigDecimal(x.floatValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).doubleValue())), - DOUBLE, FLOAT, INTEGER), - impl(nullMissingHandling((x, d) -> new ExprDoubleValue(new BigDecimal(x.doubleValue()) - .setScale(d.integerValue(), RoundingMode.HALF_UP).doubleValue())), - DOUBLE, DOUBLE, INTEGER)); - } - - /** - * Definition of sign(x) function. - * Returns the sign of the argument as -1, 0, or 1 - * depending on whether x is negative, zero, or positive - * The supported signature is + impl( + nullMissingHandling( + (x, d) -> + new ExprLongValue( + new BigDecimal(x.integerValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .longValue())), + LONG, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (x, d) -> + new ExprLongValue( + new BigDecimal(x.longValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .longValue())), + LONG, + LONG, + INTEGER), + impl( + nullMissingHandling( + (x, d) -> + new ExprDoubleValue( + new BigDecimal(x.floatValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .doubleValue())), + DOUBLE, + FLOAT, + INTEGER), + impl( + nullMissingHandling( + (x, d) -> + new ExprDoubleValue( + new BigDecimal(x.doubleValue()) + .setScale(d.integerValue(), RoundingMode.HALF_UP) + .doubleValue())), + DOUBLE, + DOUBLE, + INTEGER)); + } + + /** + * Definition of sign(x) function.
+ * Returns the sign of the argument as -1, 0, or 1
+ * depending on whether x is negative, zero, or positive
+ * The supported signature is
* SHORT/INTEGER/LONG/FLOAT/DOUBLE -> INTEGER */ private static DefaultFunctionResolver sign() { - return baseMathFunction(BuiltinFunctionName.SIGN.getName(), - v -> new ExprIntegerValue(Math.signum(v.doubleValue())), INTEGER); + return baseMathFunction( + BuiltinFunctionName.SIGN.getName(), + v -> new ExprIntegerValue(Math.signum(v.doubleValue())), + INTEGER); } /** - * Definition of signum(x) function. - * Returns the sign of the argument as -1.0, 0, or 1.0 - * depending on whether x is negative, zero, or positive - * The supported signature is + * Definition of signum(x) function.
+ * Returns the sign of the argument as -1.0, 0, or 1.0
+ * depending on whether x is negative, zero, or positive
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> INTEGER */ private static DefaultFunctionResolver signum() { - return baseMathFunction(BuiltinFunctionName.SIGNUM.getName(), - v -> new ExprIntegerValue(Math.signum(v.doubleValue())), INTEGER); + return baseMathFunction( + BuiltinFunctionName.SIGNUM.getName(), + v -> new ExprIntegerValue(Math.signum(v.doubleValue())), + INTEGER); } /** - * Definition of sinh(x) function. - * Returns the hyperbolix sine of x, defined as (((e^x) - (e^(-x))) / 2) - * The supported signature is + * Definition of sinh(x) function.
+ * Returns the hyperbolic sine of x, defined as (((e^x) - (e^(-x))) / 2)
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver sinh() { - return baseMathFunction(BuiltinFunctionName.SINH.getName(), - v -> new ExprDoubleValue(Math.sinh(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.SINH.getName(), + v -> new ExprDoubleValue(Math.sinh(v.doubleValue())), + DOUBLE); } /** - * Definition of sqrt(x) function. - * Calculate the square root of a non-negative number x - * The supported signature is + * Definition of sqrt(x) function.
+ * Calculate the square root of a non-negative number x
+ * The supported signature is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver sqrt() { - return baseMathFunction(BuiltinFunctionName.SQRT.getName(), - v -> v.doubleValue() < 0 ? ExprNullValue.of() : - new ExprDoubleValue(Math.sqrt(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.SQRT.getName(), + v -> + v.doubleValue() < 0 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.sqrt(v.doubleValue())), + DOUBLE); } /** - * Definition of cbrt(x) function. - * Calculate the cube root of a number x - * The supported signature is + * Definition of cbrt(x) function.
+ * Calculate the cube root of a number x
+ * The supported signature is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cbrt() { - return baseMathFunction(BuiltinFunctionName.CBRT.getName(), - v -> new ExprDoubleValue(Math.cbrt(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.CBRT.getName(), + v -> new ExprDoubleValue(Math.cbrt(v.doubleValue())), + DOUBLE); } /** - * Definition of truncate(x, d) function. - * Returns the number x, truncated to d decimal places - * The supported signature of round function is - * (x: INTEGER, y: INTEGER) -> LONG - * (x: LONG, y: INTEGER) -> LONG - * (x: FLOAT, y: INTEGER) -> DOUBLE + * Definition of truncate(x, d) function.
+ * Returns the number x, truncated to d decimal places
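The only difference from round(x, d) earlier in this file is the rounding mode: round uses RoundingMode.HALF_UP while truncate uses RoundingMode.DOWN, both through BigDecimal.setScale. A quick worked comparison:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class RoundVsTruncateSketch {
      public static void main(String[] args) {
        BigDecimal x = BigDecimal.valueOf(2.789);
        System.out.println(x.setScale(2, RoundingMode.HALF_UP)); // 2.79 (round)
        System.out.println(x.setScale(2, RoundingMode.DOWN));    // 2.78 (truncate)
      }
    }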
+ * The supported signature of truncate function is
+ * (x: INTEGER, y: INTEGER) -> LONG
+ * (x: LONG, y: INTEGER) -> LONG
+ * (x: FLOAT, y: INTEGER) -> DOUBLE
* (x: DOUBLE, y: INTEGER) -> DOUBLE */ private static DefaultFunctionResolver truncate() { - return define(BuiltinFunctionName.TRUNCATE.getName(), - impl(nullMissingHandling((x, y) -> new ExprLongValue(BigDecimal.valueOf(x.integerValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).longValue())), - LONG, INTEGER, INTEGER), - impl(nullMissingHandling((x, y) -> new ExprLongValue(BigDecimal.valueOf(x.longValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).longValue())), - LONG, LONG, INTEGER), - impl(nullMissingHandling((x, y) -> new ExprDoubleValue(BigDecimal.valueOf(x.floatValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).doubleValue())), - DOUBLE, FLOAT, INTEGER), - impl(nullMissingHandling((x, y) -> new ExprDoubleValue(BigDecimal.valueOf(x.doubleValue()) - .setScale(y.integerValue(), RoundingMode.DOWN).doubleValue())), - DOUBLE, DOUBLE, INTEGER)); - } - - /** - * Definition of acos(x) function. - * Calculates the arc cosine of x, that is, the value whose cosine is x. - * Returns NULL if x is not in the range -1 to 1. - * The supported signature of acos function is + return define( + BuiltinFunctionName.TRUNCATE.getName(), + impl( + nullMissingHandling( + (x, y) -> + new ExprLongValue( + BigDecimal.valueOf(x.integerValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .longValue())), + LONG, + INTEGER, + INTEGER), + impl( + nullMissingHandling( + (x, y) -> + new ExprLongValue( + BigDecimal.valueOf(x.longValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .longValue())), + LONG, + LONG, + INTEGER), + impl( + nullMissingHandling( + (x, y) -> + new ExprDoubleValue( + BigDecimal.valueOf(x.floatValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .doubleValue())), + DOUBLE, + FLOAT, + INTEGER), + impl( + nullMissingHandling( + (x, y) -> + new ExprDoubleValue( + BigDecimal.valueOf(x.doubleValue()) + .setScale(y.integerValue(), RoundingMode.DOWN) + .doubleValue())), + DOUBLE, + DOUBLE, + INTEGER)); + } + + /** + * Definition of acos(x) function.
+ * Calculates the arc cosine of x, that is, the value whose cosine is x.
+ * Returns NULL if x is not in the range -1 to 1.
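The NULL result comes from the explicit domain check in the resolver below (asin uses the same guard). A standalone restatement in plain Java, with null in place of ExprNullValue:

    public class AcosDomainSketch {
      static Double acosOrNull(double x) {
        // Outside [-1, 1] the math result would be NaN, so the function yields NULL instead.
        return (x < -1 || x > 1) ? null : Math.acos(x);
      }

      public static void main(String[] args) {
        System.out.println(acosOrNull(0.5)); // ~1.047 (pi/3)
        System.out.println(acosOrNull(2.0)); // null
      }
    }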
+ * The supported signature of acos function is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver acos() { - return define(BuiltinFunctionName.ACOS.getName(), + return define( + BuiltinFunctionName.ACOS.getName(), ExprCoreType.numberTypes().stream() - .map(type -> impl(nullMissingHandling( - v -> v.doubleValue() < -1 || v.doubleValue() > 1 ? ExprNullValue.of() : - new ExprDoubleValue(Math.acos(v.doubleValue()))), - DOUBLE, type)).collect(Collectors.toList())); - } - - /** - * Definition of asin(x) function. - * Calculates the arc sine of x, that is, the value whose sine is x. - * Returns NULL if x is not in the range -1 to 1. - * The supported signature of asin function is - * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE + .map( + type -> + impl( + nullMissingHandling( + v -> + v.doubleValue() < -1 || v.doubleValue() > 1 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.acos(v.doubleValue()))), + DOUBLE, + type)) + .collect(Collectors.toList())); + } + + /** + * Definition of asin(x) function.
+ * Calculates the arc sine of x, that is, the value whose sine is x.
+ * Returns NULL if x is not in the range -1 to 1.
+ * The supported signature of asin function is
+ * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE
*/ private static DefaultFunctionResolver asin() { - return define(BuiltinFunctionName.ASIN.getName(), + return define( + BuiltinFunctionName.ASIN.getName(), ExprCoreType.numberTypes().stream() - .map(type -> impl(nullMissingHandling( - v -> v.doubleValue() < -1 || v.doubleValue() > 1 ? ExprNullValue.of() : - new ExprDoubleValue(Math.asin(v.doubleValue()))), - DOUBLE, type)).collect(Collectors.toList())); - } - - /** - * Definition of atan(x) and atan(y, x) function. - * atan(x) calculates the arc tangent of x, that is, the value whose tangent is x. - * atan(y, x) calculates the arc tangent of y / x, except that the signs of both arguments - * are used to determine the quadrant of the result. - * The supported signature of atan function is + .map( + type -> + impl( + nullMissingHandling( + v -> + v.doubleValue() < -1 || v.doubleValue() > 1 + ? ExprNullValue.of() + : new ExprDoubleValue(Math.asin(v.doubleValue()))), + DOUBLE, + type)) + .collect(Collectors.toList())); + } + + /** + * Definition of atan(x) and atan(y, x) function.
+ * atan(x) calculates the arc tangent of x, that is, the value whose tangent is x.
+ * atan(y, x) calculates the arc tangent of y / x, except that the signs of both arguments
+ * are used to determine the quadrant of the result.
+ * The supported signature of atan function is
* (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) -> DOUBLE */ private static DefaultFunctionResolver atan() { - ImmutableList.Builder>> builder = new ImmutableList.Builder<>(); + ImmutableList.Builder< + SerializableFunction>> + builder = new ImmutableList.Builder<>(); for (ExprType type : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling(x -> new ExprDoubleValue(Math.atan(x.doubleValue()))), - type, DOUBLE)); - builder.add(impl(nullMissingHandling((y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), - x.doubleValue()))), - DOUBLE, type, type)); + builder.add( + impl( + nullMissingHandling(x -> new ExprDoubleValue(Math.atan(x.doubleValue()))), + type, + DOUBLE)); + builder.add( + impl( + nullMissingHandling( + (y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), x.doubleValue()))), + DOUBLE, + type, + type)); } return define(BuiltinFunctionName.ATAN.getName(), builder.build()); } /** - * Definition of atan2(y, x) function. - * Calculates the arc tangent of y / x, except that the signs of both arguments - * are used to determine the quadrant of the result. - * The supported signature of atan2 function is + * Definition of atan2(y, x) function.
+ * Calculates the arc tangent of y / x, except that the signs of both arguments are used to + * determine the quadrant of the result.
+ * The supported signature of atan2 function is
* (x: INTEGER/LONG/FLOAT/DOUBLE, y: INTEGER/LONG/FLOAT/DOUBLE) -> DOUBLE */ private static DefaultFunctionResolver atan2() { - ImmutableList.Builder>> builder = new ImmutableList.Builder<>(); + ImmutableList.Builder< + SerializableFunction>> + builder = new ImmutableList.Builder<>(); for (ExprType type : ExprCoreType.numberTypes()) { - builder.add(impl(nullMissingHandling((y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), - x.doubleValue()))), DOUBLE, type, type)); + builder.add( + impl( + nullMissingHandling( + (y, x) -> new ExprDoubleValue(Math.atan2(y.doubleValue(), x.doubleValue()))), + DOUBLE, + type, + type)); } return define(BuiltinFunctionName.ATAN2.getName(), builder.build()); } /** - * Definition of cos(x) function. - * Calculates the cosine of X, where X is given in radians - * The supported signature of cos function is + * Definition of cos(x) function.
+ * Calculates the cosine of X, where X is given in radians
+ * The supported signature of cos function is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cos() { - return baseMathFunction(BuiltinFunctionName.COS.getName(), - v -> new ExprDoubleValue(Math.cos(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.COS.getName(), + v -> new ExprDoubleValue(Math.cos(v.doubleValue())), + DOUBLE); } /** - * Definition of cosh(x) function. - * Returns the hyperbolic cosine of x, defined as (((e^x) + (e^(-x))) / 2) - * The supported signature is + * Definition of cosh(x) function.
+ * Returns the hyperbolic cosine of x, defined as (((e^x) + (e^(-x))) / 2)
+ * The supported signature is
* BYTE/SHORT/INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cosh() { - return baseMathFunction(BuiltinFunctionName.COSH.getName(), - v -> new ExprDoubleValue(Math.cosh(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.COSH.getName(), + v -> new ExprDoubleValue(Math.cosh(v.doubleValue())), + DOUBLE); } /** - * Definition of cot(x) function. - * Calculates the cotangent of x - * The supported signature of cot function is + * Definition of cot(x) function.<\b>
+ * Calculates the cotangent of x The supported signature of cot function is * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver cot() { - return define(BuiltinFunctionName.COT.getName(), + return define( + BuiltinFunctionName.COT.getName(), ExprCoreType.numberTypes().stream() - .map(type -> impl(nullMissingHandling( - v -> { - Double value = v.doubleValue(); - if (value == 0) { - throw new ArithmeticException( - String.format("Out of range value for cot(%s)", value)); - } - return new ExprDoubleValue(1 / Math.tan(value)); - }), - DOUBLE, type)).collect(Collectors.toList())); - } - - /** - * Definition of degrees(x) function. - * Converts x from radians to degrees - * The supported signature of degrees function is + .map( + type -> + impl( + nullMissingHandling( + v -> { + Double value = v.doubleValue(); + if (value == 0) { + throw new ArithmeticException( + String.format("Out of range value for cot(%s)", value)); + } + return new ExprDoubleValue(1 / Math.tan(value)); + }), + DOUBLE, + type)) + .collect(Collectors.toList())); + } + + /** + * Definition of degrees(x) function.
+ * Converts x from radians to degrees The supported signature of degrees function is * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver degrees() { - return baseMathFunction(BuiltinFunctionName.DEGREES.getName(), - v -> new ExprDoubleValue(Math.toDegrees(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.DEGREES.getName(), + v -> new ExprDoubleValue(Math.toDegrees(v.doubleValue())), + DOUBLE); } /** - * Definition of radians(x) function. - * Converts x from degrees to radians - * The supported signature of radians function is + * Definition of radians(x) function.
+ * Converts x from degrees to radians The supported signature of radians function is * INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver radians() { - return baseMathFunction(BuiltinFunctionName.RADIANS.getName(), - v -> new ExprDoubleValue(Math.toRadians(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.RADIANS.getName(), + v -> new ExprDoubleValue(Math.toRadians(v.doubleValue())), + DOUBLE); } /** - * Definition of sin(x) function. - * Calculates the sine of x, where x is given in radians - * The supported signature of sin function is + * Definition of sin(x) function.
+ * Calculates the sine of x, where x is given in radians The supported signature of sin function + * is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver sin() { - return baseMathFunction(BuiltinFunctionName.SIN.getName(), - v -> new ExprDoubleValue(Math.sin(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.SIN.getName(), + v -> new ExprDoubleValue(Math.sin(v.doubleValue())), + DOUBLE); } /** - * Definition of tan(x) function. - * Calculates the tangent of x, where x is given in radians - * The supported signature of tan function is + * Definition of tan(x) function.
+ * Calculates the tangent of x, where x is given in radians The supported signature of tan + * function is
* INTEGER/LONG/FLOAT/DOUBLE -> DOUBLE */ private static DefaultFunctionResolver tan() { - return baseMathFunction(BuiltinFunctionName.TAN.getName(), - v -> new ExprDoubleValue(Math.tan(v.doubleValue())), DOUBLE); + return baseMathFunction( + BuiltinFunctionName.TAN.getName(), + v -> new ExprDoubleValue(Math.tan(v.doubleValue())), + DOUBLE); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java index d3295a53f0..7c3565f69c 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/convert/TypeCastOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.convert; import static org.opensearch.sql.data.type.ExprCoreType.BOOLEAN; @@ -46,9 +45,7 @@ @UtilityClass public class TypeCastOperator { - /** - * Register Type Cast Operator. - */ + /** Register Type Cast Operator. */ public static void register(BuiltinFunctionRepository repository) { repository.register(castToString()); repository.register(castToByte()); @@ -64,148 +61,175 @@ public static void register(BuiltinFunctionRepository repository) { repository.register(castToDatetime()); } - private static DefaultFunctionResolver castToString() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_STRING.getName(), + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_STRING.getName(), Stream.concat( - Arrays.asList(BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, - TIMESTAMP, DATETIME).stream() - .map(type -> impl( - nullMissingHandling((v) -> new ExprStringValue(v.value().toString())), - STRING, type)), - Stream.of(impl(nullMissingHandling((v) -> v), STRING, STRING))) - .collect(Collectors.toList()) - ); + Arrays.asList( + BYTE, SHORT, INTEGER, LONG, FLOAT, DOUBLE, BOOLEAN, TIME, DATE, TIMESTAMP, + DATETIME) + .stream() + .map( + type -> + impl( + nullMissingHandling( + (v) -> new ExprStringValue(v.value().toString())), + STRING, + type)), + Stream.of(impl(nullMissingHandling((v) -> v), STRING, STRING))) + .collect(Collectors.toList())); } private static DefaultFunctionResolver castToByte() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_BYTE.getName(), - impl(nullMissingHandling( - (v) -> new ExprByteValue(Byte.valueOf(v.stringValue()))), BYTE, STRING), - impl(nullMissingHandling( - (v) -> new ExprByteValue(v.byteValue())), BYTE, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprByteValue(v.booleanValue() ? 1 : 0)), BYTE, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_BYTE.getName(), + impl( + nullMissingHandling((v) -> new ExprByteValue(Byte.valueOf(v.stringValue()))), + BYTE, + STRING), + impl(nullMissingHandling((v) -> new ExprByteValue(v.byteValue())), BYTE, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprByteValue(v.booleanValue() ? 1 : 0)), + BYTE, + BOOLEAN)); } private static DefaultFunctionResolver castToShort() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_SHORT.getName(), - impl(nullMissingHandling( - (v) -> new ExprShortValue(Short.valueOf(v.stringValue()))), SHORT, STRING), - impl(nullMissingHandling( - (v) -> new ExprShortValue(v.shortValue())), SHORT, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprShortValue(v.booleanValue() ? 
1 : 0)), SHORT, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_SHORT.getName(), + impl( + nullMissingHandling((v) -> new ExprShortValue(Short.valueOf(v.stringValue()))), + SHORT, + STRING), + impl(nullMissingHandling((v) -> new ExprShortValue(v.shortValue())), SHORT, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprShortValue(v.booleanValue() ? 1 : 0)), + SHORT, + BOOLEAN)); } private static DefaultFunctionResolver castToInt() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_INT.getName(), - impl(nullMissingHandling( - (v) -> new ExprIntegerValue(Integer.valueOf(v.stringValue()))), INTEGER, STRING), - impl(nullMissingHandling( - (v) -> new ExprIntegerValue(v.integerValue())), INTEGER, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprIntegerValue(v.booleanValue() ? 1 : 0)), INTEGER, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_INT.getName(), + impl( + nullMissingHandling((v) -> new ExprIntegerValue(Integer.valueOf(v.stringValue()))), + INTEGER, + STRING), + impl(nullMissingHandling((v) -> new ExprIntegerValue(v.integerValue())), INTEGER, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprIntegerValue(v.booleanValue() ? 1 : 0)), + INTEGER, + BOOLEAN)); } private static DefaultFunctionResolver castToLong() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_LONG.getName(), - impl(nullMissingHandling( - (v) -> new ExprLongValue(Long.valueOf(v.stringValue()))), LONG, STRING), - impl(nullMissingHandling( - (v) -> new ExprLongValue(v.longValue())), LONG, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprLongValue(v.booleanValue() ? 1L : 0L)), LONG, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_LONG.getName(), + impl( + nullMissingHandling((v) -> new ExprLongValue(Long.valueOf(v.stringValue()))), + LONG, + STRING), + impl(nullMissingHandling((v) -> new ExprLongValue(v.longValue())), LONG, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprLongValue(v.booleanValue() ? 1L : 0L)), + LONG, + BOOLEAN)); } private static DefaultFunctionResolver castToFloat() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_FLOAT.getName(), - impl(nullMissingHandling( - (v) -> new ExprFloatValue(Float.valueOf(v.stringValue()))), FLOAT, STRING), - impl(nullMissingHandling( - (v) -> new ExprFloatValue(v.floatValue())), FLOAT, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprFloatValue(v.booleanValue() ? 1f : 0f)), FLOAT, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_FLOAT.getName(), + impl( + nullMissingHandling((v) -> new ExprFloatValue(Float.valueOf(v.stringValue()))), + FLOAT, + STRING), + impl(nullMissingHandling((v) -> new ExprFloatValue(v.floatValue())), FLOAT, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprFloatValue(v.booleanValue() ? 1f : 0f)), + FLOAT, + BOOLEAN)); } private static DefaultFunctionResolver castToDouble() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_DOUBLE.getName(), - impl(nullMissingHandling( - (v) -> new ExprDoubleValue(Double.valueOf(v.stringValue()))), DOUBLE, STRING), - impl(nullMissingHandling( - (v) -> new ExprDoubleValue(v.doubleValue())), DOUBLE, DOUBLE), - impl(nullMissingHandling( - (v) -> new ExprDoubleValue(v.booleanValue() ? 
1D : 0D)), DOUBLE, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_DOUBLE.getName(), + impl( + nullMissingHandling((v) -> new ExprDoubleValue(Double.valueOf(v.stringValue()))), + DOUBLE, + STRING), + impl(nullMissingHandling((v) -> new ExprDoubleValue(v.doubleValue())), DOUBLE, DOUBLE), + impl( + nullMissingHandling((v) -> new ExprDoubleValue(v.booleanValue() ? 1D : 0D)), + DOUBLE, + BOOLEAN)); } private static DefaultFunctionResolver castToBoolean() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), - impl(nullMissingHandling( - (v) -> ExprBooleanValue.of(Boolean.valueOf(v.stringValue()))), BOOLEAN, STRING), - impl(nullMissingHandling( - (v) -> ExprBooleanValue.of(v.doubleValue() != 0)), BOOLEAN, DOUBLE), - impl(nullMissingHandling((v) -> v), BOOLEAN, BOOLEAN) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), + impl( + nullMissingHandling((v) -> ExprBooleanValue.of(Boolean.valueOf(v.stringValue()))), + BOOLEAN, + STRING), + impl( + nullMissingHandling((v) -> ExprBooleanValue.of(v.doubleValue() != 0)), BOOLEAN, DOUBLE), + impl(nullMissingHandling((v) -> v), BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver castToDate() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_DATE.getName(), - impl(nullMissingHandling( - (v) -> new ExprDateValue(v.stringValue())), DATE, STRING), - impl(nullMissingHandling( - (v) -> new ExprDateValue(v.dateValue())), DATE, DATETIME), - impl(nullMissingHandling( - (v) -> new ExprDateValue(v.dateValue())), DATE, TIMESTAMP), - impl(nullMissingHandling((v) -> v), DATE, DATE) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_DATE.getName(), + impl(nullMissingHandling((v) -> new ExprDateValue(v.stringValue())), DATE, STRING), + impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, DATETIME), + impl(nullMissingHandling((v) -> new ExprDateValue(v.dateValue())), DATE, TIMESTAMP), + impl(nullMissingHandling((v) -> v), DATE, DATE)); } private static DefaultFunctionResolver castToTime() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_TIME.getName(), - impl(nullMissingHandling( - (v) -> new ExprTimeValue(v.stringValue())), TIME, STRING), - impl(nullMissingHandling( - (v) -> new ExprTimeValue(v.timeValue())), TIME, DATETIME), - impl(nullMissingHandling( - (v) -> new ExprTimeValue(v.timeValue())), TIME, TIMESTAMP), - impl(nullMissingHandling((v) -> v), TIME, TIME) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_TIME.getName(), + impl(nullMissingHandling((v) -> new ExprTimeValue(v.stringValue())), TIME, STRING), + impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, DATETIME), + impl(nullMissingHandling((v) -> new ExprTimeValue(v.timeValue())), TIME, TIMESTAMP), + impl(nullMissingHandling((v) -> v), TIME, TIME)); } // `DATE`/`TIME`/`DATETIME` -> `DATETIME`/TIMESTAMP` cast tested in BinaryPredicateOperatorTest private static DefaultFunctionResolver castToTimestamp() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), - impl(nullMissingHandling( - (v) -> new ExprTimestampValue(v.stringValue())), TIMESTAMP, STRING), - impl(nullMissingHandling( - (v) -> new ExprTimestampValue(v.timestampValue())), TIMESTAMP, DATETIME), - impl(nullMissingHandling( - (v) -> new ExprTimestampValue(v.timestampValue())), TIMESTAMP, DATE), - implWithProperties(nullMissingHandlingWithProperties( - (fp, v) -> new ExprTimestampValue(((ExprTimeValue)v).timestampValue(fp))), - TIMESTAMP, TIME), - 
impl(nullMissingHandling((v) -> v), TIMESTAMP, TIMESTAMP) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), + impl( + nullMissingHandling((v) -> new ExprTimestampValue(v.stringValue())), TIMESTAMP, STRING), + impl( + nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), + TIMESTAMP, + DATETIME), + impl( + nullMissingHandling((v) -> new ExprTimestampValue(v.timestampValue())), + TIMESTAMP, + DATE), + implWithProperties( + nullMissingHandlingWithProperties( + (fp, v) -> new ExprTimestampValue(((ExprTimeValue) v).timestampValue(fp))), + TIMESTAMP, + TIME), + impl(nullMissingHandling((v) -> v), TIMESTAMP, TIMESTAMP)); } private static DefaultFunctionResolver castToDatetime() { - return FunctionDSL.define(BuiltinFunctionName.CAST_TO_DATETIME.getName(), - impl(nullMissingHandling( - (v) -> new ExprDatetimeValue(v.stringValue())), DATETIME, STRING), - impl(nullMissingHandling( - (v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, TIMESTAMP), - impl(nullMissingHandling( - (v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, DATE), - implWithProperties(nullMissingHandlingWithProperties( - (fp, v) -> new ExprDatetimeValue(((ExprTimeValue)v).datetimeValue(fp))), - DATETIME, TIME), - impl(nullMissingHandling((v) -> v), DATETIME, DATETIME) - ); + return FunctionDSL.define( + BuiltinFunctionName.CAST_TO_DATETIME.getName(), + impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.stringValue())), DATETIME, STRING), + impl( + nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), + DATETIME, + TIMESTAMP), + impl(nullMissingHandling((v) -> new ExprDatetimeValue(v.datetimeValue())), DATETIME, DATE), + implWithProperties( + nullMissingHandlingWithProperties( + (fp, v) -> new ExprDatetimeValue(((ExprTimeValue) v).datetimeValue(fp))), + DATETIME, + TIME), + impl(nullMissingHandling((v) -> v), DATETIME, DATETIME)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java index cc5b47bde1..bf6b3c22f5 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/BinaryPredicateOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.predicate; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; @@ -30,10 +29,10 @@ import org.opensearch.sql.utils.OperatorUtils; /** - * The definition of binary predicate function - * and, Accepts two Boolean values and produces a Boolean. - * or, Accepts two Boolean values and produces a Boolean. - * xor, Accepts two Boolean values and produces a Boolean. + * The definition of binary predicate function
+ * and, Accepts two Boolean values and produces a Boolean.
+ * or, Accepts two Boolean values and produces a Boolean.
+ * xor, Accepts two Boolean values and produces a Boolean.
* equalTo, Compare the left expression and right expression and produces a Boolean. */ @UtilityClass @@ -60,17 +59,64 @@ public static void register(BuiltinFunctionRepository repository) { /** * The and logic. - * A B A AND B - * TRUE TRUE TRUE - * TRUE FALSE FALSE - * TRUE NULL NULL - * TRUE MISSING MISSING - * FALSE FALSE FALSE - * FALSE NULL FALSE - * FALSE MISSING FALSE - * NULL NULL NULL - * NULL MISSING MISSING - * MISSING MISSING MISSING + * + *
ExprTypeWidens to data types
INTEGERLONG, FLOAT, DOUBLE
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
*   A        B        A AND B
*   TRUE     TRUE     TRUE
*   TRUE     FALSE    FALSE
*   TRUE     NULL     NULL
*   TRUE     MISSING  MISSING
*   FALSE    FALSE    FALSE
*   FALSE    NULL     FALSE
*   FALSE    MISSING  FALSE
*   NULL     NULL     NULL
*   NULL     MISSING  MISSING
*   MISSING  MISSING  MISSING
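The table above is implemented literally as a Guava ImmutableTable lookup (see andTable and lookupTableFunction below). A simplified, string-keyed sketch of the idea; the real table is keyed by ExprValue constants and covers the rows listed above:

    import com.google.common.collect.ImmutableTable;
    import com.google.common.collect.Table;

    public class ThreeValuedAndSketch {
      // String-keyed stand-in for the ExprValue-keyed andTable; only a few rows are shown.
      private static final Table<String, String, String> AND =
          new ImmutableTable.Builder<String, String, String>()
              .put("TRUE", "TRUE", "TRUE")
              .put("TRUE", "NULL", "NULL")
              .put("FALSE", "NULL", "FALSE")
              .put("FALSE", "MISSING", "FALSE")
              .build();

      public static void main(String[] args) {
        System.out.println(AND.get("TRUE", "NULL"));  // NULL: an unknown operand keeps AND unknown
        System.out.println(AND.get("FALSE", "NULL")); // FALSE: a FALSE operand decides AND
      }
    }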
*/ private static Table andTable = new ImmutableTable.Builder() @@ -88,17 +134,64 @@ public static void register(BuiltinFunctionRepository repository) { /** * The or logic. - * A B A AND B - * TRUE TRUE TRUE - * TRUE FALSE TRUE - * TRUE NULL TRUE - * TRUE MISSING TRUE - * FALSE FALSE FALSE - * FALSE NULL NULL - * FALSE MISSING MISSING - * NULL NULL NULL - * NULL MISSING NULL - * MISSING MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
*   A        B        A OR B
*   TRUE     TRUE     TRUE
*   TRUE     FALSE    TRUE
*   TRUE     NULL     TRUE
*   TRUE     MISSING  TRUE
*   FALSE    FALSE    FALSE
*   FALSE    NULL     NULL
*   FALSE    MISSING  MISSING
*   NULL     NULL     NULL
*   NULL     MISSING  NULL
*   MISSING  MISSING  MISSING
*/ private static Table orTable = new ImmutableTable.Builder() @@ -116,17 +209,64 @@ public static void register(BuiltinFunctionRepository repository) { /** * The xor logic. - * A B A AND B - * TRUE TRUE FALSE - * TRUE FALSE TRUE - * TRUE NULL TRUE - * TRUE MISSING TRUE - * FALSE FALSE FALSE - * FALSE NULL NULL - * FALSE MISSING MISSING - * NULL NULL NULL - * NULL MISSING NULL - * MISSING MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
*   A        B        A XOR B
*   TRUE     TRUE     FALSE
*   TRUE     FALSE    TRUE
*   TRUE     NULL     TRUE
*   TRUE     MISSING  TRUE
*   FALSE    FALSE    FALSE
*   FALSE    NULL     NULL
*   FALSE    MISSING  MISSING
*   NULL     NULL     NULL
*   NULL     MISSING  NULL
*   MISSING  MISSING  MISSING
*/ private static Table xorTable = new ImmutableTable.Builder() @@ -143,87 +283,132 @@ public static void register(BuiltinFunctionRepository repository) { .build(); private static DefaultFunctionResolver and() { - return define(BuiltinFunctionName.AND.getName(), + return define( + BuiltinFunctionName.AND.getName(), impl((v1, v2) -> lookupTableFunction(v1, v2, andTable), BOOLEAN, BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver or() { - return define(BuiltinFunctionName.OR.getName(), + return define( + BuiltinFunctionName.OR.getName(), impl((v1, v2) -> lookupTableFunction(v1, v2, orTable), BOOLEAN, BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver xor() { - return define(BuiltinFunctionName.XOR.getName(), + return define( + BuiltinFunctionName.XOR.getName(), impl((v1, v2) -> lookupTableFunction(v1, v2, xorTable), BOOLEAN, BOOLEAN, BOOLEAN)); } private static DefaultFunctionResolver equal() { - return define(BuiltinFunctionName.EQUAL.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.equals(v2))), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.EQUAL.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.equals(v2))), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver notEqual() { - return define(BuiltinFunctionName.NOTEQUAL.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(!v1.equals(v2))), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.NOTEQUAL.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(!v1.equals(v2))), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver less() { - return define(BuiltinFunctionName.LESS.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) < 0)), - BOOLEAN,type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.LESS.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) < 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver lte() { - return define(BuiltinFunctionName.LTE.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) <= 0)), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.LTE.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) <= 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver greater() { - return define(BuiltinFunctionName.GREATER.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) > 0)), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.GREATER.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) > 
0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver gte() { - return define(BuiltinFunctionName.GTE.getName(), ExprCoreType.coreTypes().stream() - .map(type -> impl(nullMissingHandling( - (v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) >= 0)), - BOOLEAN, type, type)) - .collect(Collectors.toList())); + return define( + BuiltinFunctionName.GTE.getName(), + ExprCoreType.coreTypes().stream() + .map( + type -> + impl( + nullMissingHandling((v1, v2) -> ExprBooleanValue.of(v1.compareTo(v2) >= 0)), + BOOLEAN, + type, + type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver like() { - return define(BuiltinFunctionName.LIKE.getName(), + return define( + BuiltinFunctionName.LIKE.getName(), impl(nullMissingHandling(OperatorUtils::matches), BOOLEAN, STRING, STRING)); } private static DefaultFunctionResolver regexp() { - return define(BuiltinFunctionName.REGEXP.getName(), + return define( + BuiltinFunctionName.REGEXP.getName(), impl(nullMissingHandling(OperatorUtils::matchesRegexp), INTEGER, STRING, STRING)); } private static DefaultFunctionResolver notLike() { - return define(BuiltinFunctionName.NOT_LIKE.getName(), - impl(nullMissingHandling( - (v1, v2) -> UnaryPredicateOperator.not(OperatorUtils.matches(v1, v2))), - BOOLEAN, STRING, STRING)); + return define( + BuiltinFunctionName.NOT_LIKE.getName(), + impl( + nullMissingHandling( + (v1, v2) -> UnaryPredicateOperator.not(OperatorUtils.matches(v1, v2))), + BOOLEAN, + STRING, + STRING)); } - private static ExprValue lookupTableFunction(ExprValue arg1, ExprValue arg2, - Table table) { + private static ExprValue lookupTableFunction( + ExprValue arg1, ExprValue arg2, Table table) { if (table.contains(arg1, arg2)) { return table.get(arg1, arg2); } else { diff --git a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java index 7d79d9d923..ad9d9ac934 100644 --- a/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java +++ b/core/src/main/java/org/opensearch/sql/expression/operator/predicate/UnaryPredicateOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.operator.predicate; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL; @@ -28,14 +27,11 @@ import org.opensearch.sql.expression.function.SerializableFunction; /** - * The definition of unary predicate function - * not, Accepts one Boolean value and produces a Boolean. + * The definition of unary predicate function not, Accepts one Boolean value and produces a Boolean. */ @UtilityClass public class UnaryPredicateOperator { - /** - * Register Unary Predicate Function. - */ + /** Register Unary Predicate Function. */ public static void register(BuiltinFunctionRepository repository) { repository.register(not()); repository.register(isNotNull()); @@ -47,17 +43,36 @@ public static void register(BuiltinFunctionRepository repository) { } private static DefaultFunctionResolver not() { - return FunctionDSL.define(BuiltinFunctionName.NOT.getName(), FunctionDSL - .impl(UnaryPredicateOperator::not, BOOLEAN, BOOLEAN)); + return FunctionDSL.define( + BuiltinFunctionName.NOT.getName(), + FunctionDSL.impl(UnaryPredicateOperator::not, BOOLEAN, BOOLEAN)); } /** * The not logic. 
- * A NOT A - * TRUE FALSE - * FALSE TRUE - * NULL NULL - * MISSING MISSING + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
*   A        NOT A
*   TRUE     FALSE
*   FALSE    TRUE
*   NULL     NULL
*   MISSING  MISSING
*/ public ExprValue not(ExprValue v) { if (v.isMissing() || v.isNull()) { @@ -68,31 +83,33 @@ public ExprValue not(ExprValue v) { } private static DefaultFunctionResolver isNull(BuiltinFunctionName funcName) { - return FunctionDSL - .define(funcName.getName(), Arrays.stream(ExprCoreType.values()) - .map(type -> FunctionDSL - .impl((v) -> ExprBooleanValue.of(v.isNull()), BOOLEAN, type)) - .collect( - Collectors.toList())); + return FunctionDSL.define( + funcName.getName(), + Arrays.stream(ExprCoreType.values()) + .map(type -> FunctionDSL.impl((v) -> ExprBooleanValue.of(v.isNull()), BOOLEAN, type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver isNotNull() { - return FunctionDSL - .define(BuiltinFunctionName.IS_NOT_NULL.getName(), Arrays.stream(ExprCoreType.values()) - .map(type -> FunctionDSL - .impl((v) -> ExprBooleanValue.of(!v.isNull()), BOOLEAN, type)) - .collect( - Collectors.toList())); + return FunctionDSL.define( + BuiltinFunctionName.IS_NOT_NULL.getName(), + Arrays.stream(ExprCoreType.values()) + .map(type -> FunctionDSL.impl((v) -> ExprBooleanValue.of(!v.isNull()), BOOLEAN, type)) + .collect(Collectors.toList())); } private static DefaultFunctionResolver ifFunction() { FunctionName functionName = BuiltinFunctionName.IF.getName(); List typeList = ExprCoreType.coreTypes(); - List>> functionsOne = typeList.stream().map(v -> - impl((UnaryPredicateOperator::exprIf), v, BOOLEAN, v, v)) - .collect(Collectors.toList()); + List< + SerializableFunction< + FunctionName, + org.apache.commons.lang3.tuple.Pair>> + functionsOne = + typeList.stream() + .map(v -> impl((UnaryPredicateOperator::exprIf), v, BOOLEAN, v, v)) + .collect(Collectors.toList()); DefaultFunctionResolver functionResolver = FunctionDSL.define(functionName, functionsOne); return functionResolver; @@ -102,10 +119,14 @@ private static DefaultFunctionResolver ifNull() { FunctionName functionName = BuiltinFunctionName.IFNULL.getName(); List typeList = ExprCoreType.coreTypes(); - List>> functionsOne = typeList.stream().map(v -> - impl((UnaryPredicateOperator::exprIfNull), v, v, v)) - .collect(Collectors.toList()); + List< + SerializableFunction< + FunctionName, + org.apache.commons.lang3.tuple.Pair>> + functionsOne = + typeList.stream() + .map(v -> impl((UnaryPredicateOperator::exprIfNull), v, v, v)) + .collect(Collectors.toList()); DefaultFunctionResolver functionResolver = FunctionDSL.define(functionName, functionsOne); return functionResolver; @@ -116,14 +137,16 @@ private static DefaultFunctionResolver nullIf() { List typeList = ExprCoreType.coreTypes(); DefaultFunctionResolver functionResolver = - FunctionDSL.define(functionName, - typeList.stream().map(v -> - impl((UnaryPredicateOperator::exprNullIf), v, v, v)) - .collect(Collectors.toList())); + FunctionDSL.define( + functionName, + typeList.stream() + .map(v -> impl((UnaryPredicateOperator::exprNullIf), v, v, v)) + .collect(Collectors.toList())); return functionResolver; } - /** v2 if v1 is null. + /** + * v2 if v1 is null. * * @param v1 varable 1 * @param v2 varable 2 @@ -133,7 +156,8 @@ public static ExprValue exprIfNull(ExprValue v1, ExprValue v2) { return (v1.isNull() || v1.isMissing()) ? v2 : v1; } - /** return null if v1 equls to v2. + /** + * return null if v1 equls to v2. 
* * @param v1 varable 1 * @param v2 varable 2 @@ -146,5 +170,4 @@ public static ExprValue exprNullIf(ExprValue v1, ExprValue v2) { public static ExprValue exprIf(ExprValue v1, ExprValue v2, ExprValue v3) { return !v1.isNull() && !v1.isMissing() && LITERAL_TRUE.equals(v1) ? v2 : v3; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java index 9797832f07..748ce5f559 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/GrokExpression.java @@ -20,9 +20,7 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.expression.Expression; -/** - * GrokExpression with grok patterns. - */ +/** GrokExpression with grok patterns. */ @EqualsAndHashCode(callSuper = true) @ToString public class GrokExpression extends ParseExpression { @@ -33,15 +31,14 @@ public class GrokExpression extends ParseExpression { grokCompiler.registerDefaultPatterns(); } - @EqualsAndHashCode.Exclude - private final Grok grok; + @EqualsAndHashCode.Exclude private final Grok grok; /** * GrokExpression. * * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field */ public GrokExpression(Expression sourceField, Expression pattern, Expression identifier) { super("grok", sourceField, pattern, identifier); @@ -69,7 +66,9 @@ ExprValue parseValue(ExprValue value) throws ExpressionEvaluationException { */ public static List getNamedGroupCandidates(String pattern) { Grok grok = grokCompiler.compile(pattern); - return grok.namedGroups.stream().map(grok::getNamedRegexCollectionById) - .filter(group -> !group.equals("UNWANTED")).collect(Collectors.toUnmodifiableList()); + return grok.namedGroups.stream() + .map(grok::getNamedRegexCollectionById) + .filter(group -> !group.equals("UNWANTED")) + .collect(Collectors.toUnmodifiableList()); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java index 8d1ebcce08..6e2456ecc2 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/ParseExpression.java @@ -21,29 +21,25 @@ import org.opensearch.sql.expression.env.Environment; import org.opensearch.sql.expression.function.FunctionName; -/** - * ParseExpression. - */ +/** ParseExpression. */ @EqualsAndHashCode(callSuper = false) @ToString public abstract class ParseExpression extends FunctionExpression { - @Getter - protected final Expression sourceField; + @Getter protected final Expression sourceField; protected final Expression pattern; - @Getter - protected final Expression identifier; + @Getter protected final Expression identifier; protected final String identifierStr; /** * ParseExpression. 
* * @param functionName name of function expression - * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param sourceField source text field + * @param pattern pattern used for parsing + * @param identifier derived field */ - public ParseExpression(String functionName, Expression sourceField, Expression pattern, - Expression identifier) { + public ParseExpression( + String functionName, Expression sourceField, Expression pattern, Expression identifier) { super(FunctionName.of(functionName), ImmutableList.of(sourceField, pattern, identifier)); this.sourceField = sourceField; this.pattern = pattern; diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java index 67160dad58..5b92779c35 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/PatternsExpression.java @@ -17,30 +17,28 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.expression.Expression; -/** - * PatternsExpression with regex filter. - */ +/** PatternsExpression with regex filter. */ @EqualsAndHashCode(callSuper = true) @ToString public class PatternsExpression extends ParseExpression { - /** - * Default name of the derived field. - */ + /** Default name of the derived field. */ public static final String DEFAULT_NEW_FIELD = "patterns_field"; - private static final ImmutableSet DEFAULT_IGNORED_CHARS = ImmutableSet.copyOf( - "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".chars() - .mapToObj(c -> (char) c).toArray(Character[]::new)); + private static final ImmutableSet DEFAULT_IGNORED_CHARS = + ImmutableSet.copyOf( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + .chars() + .mapToObj(c -> (char) c) + .toArray(Character[]::new)); private final boolean useCustomPattern; - @EqualsAndHashCode.Exclude - private Pattern pattern; + @EqualsAndHashCode.Exclude private Pattern pattern; /** * PatternsExpression. * * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field */ public PatternsExpression(Expression sourceField, Expression pattern, Expression identifier) { super("patterns", sourceField, pattern, identifier); diff --git a/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java b/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java index f3a3ff0b66..7514c9df69 100644 --- a/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/parse/RegexExpression.java @@ -19,24 +19,20 @@ import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.expression.Expression; -/** - * RegexExpression with regex and named capture group. - */ +/** RegexExpression with regex and named capture group. 
*/ @EqualsAndHashCode(callSuper = true) @ToString public class RegexExpression extends ParseExpression { private static final Logger log = LogManager.getLogger(RegexExpression.class); private static final Pattern GROUP_PATTERN = Pattern.compile("\\(\\?<([a-zA-Z][a-zA-Z0-9]*)>"); - @Getter - @EqualsAndHashCode.Exclude - private final Pattern regexPattern; + @Getter @EqualsAndHashCode.Exclude private final Pattern regexPattern; /** * RegexExpression. * * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field */ public RegexExpression(Expression sourceField, Expression pattern, Expression identifier) { super("regex", sourceField, pattern, identifier); diff --git a/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java b/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java index aff114145e..949ed52e7f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java @@ -24,9 +24,7 @@ public class SpanExpression implements Expression { private final Expression value; private final SpanUnit unit; - /** - * Construct a span expression by field and span interval expression. - */ + /** Construct a span expression by field and span interval expression. */ public SpanExpression(Expression field, Expression value, SpanUnit unit) { this.field = field; this.value = value; @@ -35,18 +33,46 @@ public SpanExpression(Expression field, Expression value, SpanUnit unit) { @Override public ExprValue valueOf(Environment valueEnv) { - Rounding rounding = Rounding.createRounding(this); //TODO: will integrate with WindowAssigner + Rounding rounding = + Rounding.createRounding(this); // TODO: will integrate with WindowAssigner return rounding.round(field.valueOf(valueEnv)); } /** * Return type follows the following table. - * FIELD VALUE RETURN_TYPE - * int/long integer int/long (field type) - * int/long double double - * float/double integer float/double (field type) - * float/double double float/double (field type) - * other any field type + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
*   FIELD         VALUE    RETURN_TYPE
*   int/long      integer  int/long (field type)
*   int/long      double   double
*   float/double  integer  float/double (field type)
*   float/double  double   float/double (field type)
*   other         any      field type
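Restated as code (this is a sketch of the rule in the table, not the actual type() implementation that follows, and it assumes only the listed core types matter):

    import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE;
    import static org.opensearch.sql.data.type.ExprCoreType.INTEGER;
    import static org.opensearch.sql.data.type.ExprCoreType.LONG;

    import org.opensearch.sql.data.type.ExprCoreType;

    public class SpanTypeRuleSketch {
      // Integral fields keep their own type unless the span value is a double,
      // in which case the result widens to DOUBLE; all other field types pass through.
      static ExprCoreType spanType(ExprCoreType fieldType, ExprCoreType valueType) {
        boolean integralField = fieldType == INTEGER || fieldType == LONG;
        return (integralField && valueType == DOUBLE) ? DOUBLE : fieldType;
      }
    }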
*/ @Override public ExprType type() { diff --git a/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java b/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java index e12bcd0a58..cf071c4f31 100644 --- a/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/system/SystemFunctions.java @@ -24,9 +24,7 @@ @UtilityClass public class SystemFunctions { - /** - * Register TypeOf Operator. - */ + /** Register TypeOf Operator. */ public static void register(BuiltinFunctionRepository repository) { repository.register(typeof()); } @@ -37,19 +35,20 @@ private static FunctionResolver typeof() { @Override public Pair resolve( FunctionSignature unresolvedSignature) { - return Pair.of(unresolvedSignature, + return Pair.of( + unresolvedSignature, (functionProperties, arguments) -> new FunctionExpression(BuiltinFunctionName.TYPEOF.getName(), arguments) { - @Override - public ExprValue valueOf(Environment valueEnv) { - return new ExprStringValue(getArguments().get(0).type().legacyTypeName()); - } + @Override + public ExprValue valueOf(Environment valueEnv) { + return new ExprStringValue(getArguments().get(0).type().legacyTypeName()); + } - @Override - public ExprType type() { - return STRING; - } - }); + @Override + public ExprType type() { + return STRING; + } + }); } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java index 0bbfb65154..1cf7f64867 100644 --- a/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/text/TextFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.text; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -35,9 +34,8 @@ import org.opensearch.sql.expression.function.SerializableTriFunction; /** - * The definition of text functions. - * 1) have the clear interface for function define. - * 2) the implementation should rely on ExprValue. + * The definition of text functions. 1) have the clear interface for function define. 2) the + * implementation should rely on ExprValue. */ @UtilityClass public class TextFunction { @@ -70,17 +68,21 @@ public void register(BuiltinFunctionRepository repository) { } /** - * Gets substring starting at given point, for optional given length. - * Form of this function using keywords instead of comma delimited variables is not supported. - * Supports following signatures: + * Gets substring starting at given point, for optional given length.
+ * Form of this function using keywords instead of comma delimited variables is not supported.
+ * Supports following signatures:
* (STRING, INTEGER)/(STRING, INTEGER, INTEGER) -> STRING */ private DefaultFunctionResolver substringSubstr(FunctionName functionName) { - return define(functionName, - impl(nullMissingHandling(TextFunction::exprSubstrStart), - STRING, STRING, INTEGER), - impl(nullMissingHandling(TextFunction::exprSubstrStartLength), - STRING, STRING, INTEGER, INTEGER)); + return define( + functionName, + impl(nullMissingHandling(TextFunction::exprSubstrStart), STRING, STRING, INTEGER), + impl( + nullMissingHandling(TextFunction::exprSubstrStartLength), + STRING, + STRING, + INTEGER, + INTEGER)); } private DefaultFunctionResolver substring() { @@ -92,222 +94,277 @@ private DefaultFunctionResolver substr() { } /** - * Removes leading whitespace from string. - * Supports following signatures: + * Removes leading whitespace from string.
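The SUBSTRING/SUBSTR resolver above uses 1-based positions with an optional length. A plain java.lang.String sketch of the same semantics, with invented literal values:

    String s = "OpenSearch";
    // SUBSTRING('OpenSearch', 2)    -> "penSearch": the start position is 1-based
    String fromStart = s.substring(2 - 1);
    // SUBSTRING('OpenSearch', 2, 4) -> "penS": the third argument is a length, not an end index
    String withLength = s.substring(2 - 1, 2 - 1 + 4);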
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver ltrim() { - return define(BuiltinFunctionName.LTRIM.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripLeading())), - STRING, STRING)); + return define( + BuiltinFunctionName.LTRIM.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripLeading())), + STRING, + STRING)); } /** - * Removes trailing whitespace from string. - * Supports following signatures: + * Removes trailing whitespace from string.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver rtrim() { - return define(BuiltinFunctionName.RTRIM.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripTrailing())), - STRING, STRING)); + return define( + BuiltinFunctionName.RTRIM.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue(v.stringValue().stripTrailing())), + STRING, + STRING)); } /** - * Removes leading and trailing whitespace from string. - * Has option to specify a String to trim instead of whitespace but this is not yet supported. - * Supporting String specification requires finding keywords inside TRIM command. - * Supports following signatures: + * Removes leading and trailing whitespace from string.
+ * Has option to specify a String to trim instead of whitespace but this is not yet supported.
+ * Supporting String specification requires finding keywords inside TRIM command.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver trim() { - return define(BuiltinFunctionName.TRIM.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue(v.stringValue().trim())), - STRING, STRING)); + return define( + BuiltinFunctionName.TRIM.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue(v.stringValue().trim())), + STRING, + STRING)); } /** - * Converts String to lowercase. - * Supports following signatures: + * Converts String to lowercase.
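One detail worth calling out about the three trim resolvers above: LTRIM and RTRIM are built on String.stripLeading()/stripTrailing(), which treat any Unicode whitespace as trimmable, while TRIM uses String.trim(), which only removes characters up to U+0020. A plain-Java sketch with an invented sample string:

    String padded = "\u2002text\u2002";    // padded with EN SPACE (U+2002), a Unicode space above U+0020
    String left  = padded.stripLeading();  // "text\u2002"       -- what LTRIM applies
    String right = padded.stripTrailing(); // "\u2002text"       -- what RTRIM applies
    String both  = padded.trim();          // "\u2002text\u2002" -- trim() leaves U+2002 untouched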
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver lower() { - return define(BuiltinFunctionName.LOWER.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toLowerCase()))), - STRING, STRING) - ); + return define( + BuiltinFunctionName.LOWER.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toLowerCase()))), + STRING, + STRING)); } /** - * Converts String to uppercase. - * Supports following signatures: + * Converts String to uppercase.
+ * Supports following signatures:
* STRING -> STRING */ private DefaultFunctionResolver upper() { - return define(BuiltinFunctionName.UPPER.getName(), - impl(nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toUpperCase()))), - STRING, STRING) - ); + return define( + BuiltinFunctionName.UPPER.getName(), + impl( + nullMissingHandling((v) -> new ExprStringValue((v.stringValue().toUpperCase()))), + STRING, + STRING)); } /** - * Concatenates a list of Strings. - * Supports following signatures: + * Concatenates a list of Strings.
+ * Supports following signatures:
* (STRING, STRING, ...., STRING) -> STRING */ private DefaultFunctionResolver concat() { FunctionName concatFuncName = BuiltinFunctionName.CONCAT.getName(); - return define(concatFuncName, funcName -> + return define( + concatFuncName, + funcName -> Pair.of( - new FunctionSignature(concatFuncName, Collections.singletonList(ARRAY)), - (funcProp, args) -> new FunctionExpression(funcName, args) { + new FunctionSignature(concatFuncName, Collections.singletonList(ARRAY)), + (funcProp, args) -> + new FunctionExpression(funcName, args) { @Override public ExprValue valueOf(Environment valueEnv) { - List exprValues = args.stream() - .map(arg -> arg.valueOf(valueEnv)).collect(Collectors.toList()); - if (exprValues.stream().anyMatch(ExprValue::isMissing)) { - return ExprValueUtils.missingValue(); - } - if (exprValues.stream().anyMatch(ExprValue::isNull)) { - return ExprValueUtils.nullValue(); - } - return new ExprStringValue(exprValues.stream() - .map(ExprValue::stringValue) - .collect(Collectors.joining())); + List exprValues = + args.stream() + .map(arg -> arg.valueOf(valueEnv)) + .collect(Collectors.toList()); + if (exprValues.stream().anyMatch(ExprValue::isMissing)) { + return ExprValueUtils.missingValue(); + } + if (exprValues.stream().anyMatch(ExprValue::isNull)) { + return ExprValueUtils.nullValue(); + } + return new ExprStringValue( + exprValues.stream() + .map(ExprValue::stringValue) + .collect(Collectors.joining())); } @Override public ExprType type() { return STRING; } - } - )); + })); } /** - * TODO: https://github.com/opendistro-for-elasticsearch/sql/issues/710 - * Extend to accept variable argument amounts. - * Concatenates a list of Strings with a separator string. - * Supports following signatures: + * TODO: https://github.com/opendistro-for-elasticsearch/sql/issues/710
+ * Extend to accept variable argument amounts.
+ *
+ * Concatenates a list of Strings with a separator string. Supports following
+ * signatures:
* (STRING, STRING, STRING) -> STRING */ private DefaultFunctionResolver concat_ws() { - return define(BuiltinFunctionName.CONCAT_WS.getName(), - impl(nullMissingHandling((sep, str1, str2) -> - new ExprStringValue(str1.stringValue() + sep.stringValue() + str2.stringValue())), - STRING, STRING, STRING, STRING)); + return define( + BuiltinFunctionName.CONCAT_WS.getName(), + impl( + nullMissingHandling( + (sep, str1, str2) -> + new ExprStringValue( + str1.stringValue() + sep.stringValue() + str2.stringValue())), + STRING, + STRING, + STRING, + STRING)); } /** - * Calculates length of String in bytes. - * Supports following signatures: + * Calculates length of String in bytes.
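A quick sketch of the observable behavior of the two concatenation resolvers above, with invented values; CONCAT propagates NULL/MISSING per the explicit checks in concat(), and CONCAT_WS currently accepts exactly one separator plus two strings:

    // CONCAT('one', 'two', 'three') -> "onetwothree"
    String concatenated = String.join("", "one", "two", "three");
    // CONCAT_WS(',', 'one', 'two')  -> "one,two"  (separator first, then the two operands)
    String withSeparator = "one" + "," + "two";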
+ * Supports following signatures:
* STRING -> INTEGER */ private DefaultFunctionResolver length() { - return define(BuiltinFunctionName.LENGTH.getName(), - impl(nullMissingHandling((str) -> - new ExprIntegerValue(str.stringValue().getBytes().length)), INTEGER, STRING)); + return define( + BuiltinFunctionName.LENGTH.getName(), + impl( + nullMissingHandling((str) -> new ExprIntegerValue(str.stringValue().getBytes().length)), + INTEGER, + STRING)); } /** - * Does String comparison of two Strings and returns Integer value. - * Supports following signatures: + * Does String comparison of two Strings and returns Integer value.
+ * Supports following signatures:
* (STRING, STRING) -> INTEGER */ private DefaultFunctionResolver strcmp() { - return define(BuiltinFunctionName.STRCMP.getName(), - impl(nullMissingHandling((str1, str2) -> - new ExprIntegerValue(Integer.compare( - str1.stringValue().compareTo(str2.stringValue()), 0))), - INTEGER, STRING, STRING)); + return define( + BuiltinFunctionName.STRCMP.getName(), + impl( + nullMissingHandling( + (str1, str2) -> + new ExprIntegerValue( + Integer.compare(str1.stringValue().compareTo(str2.stringValue()), 0))), + INTEGER, + STRING, + STRING)); } /** - * Returns the rightmost len characters from the string str, or NULL if any argument is NULL. - * Supports following signatures: + * Returns the rightmost len characters from the string str, or NULL if any argument is + * NULL.
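Two behaviors in the LENGTH and STRCMP resolvers above are easy to misread: LENGTH counts encoded bytes rather than characters, and STRCMP collapses the comparison result to -1, 0, or 1. A plain-Java sketch with invented values:

    // LENGTH('café') -> 5 when the default charset is UTF-8, because 'é' encodes to two bytes
    int lengthInBytes = "café".getBytes().length;
    // STRCMP('a', 'b') -> -1, STRCMP('b', 'a') -> 1, STRCMP('a', 'a') -> 0
    int sign = Integer.compare("a".compareTo("b"), 0); // -1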
+ * Supports following signatures:
* (STRING, INTEGER) -> STRING */ private DefaultFunctionResolver right() { - return define(BuiltinFunctionName.RIGHT.getName(), - impl(nullMissingHandling(TextFunction::exprRight), STRING, STRING, INTEGER)); + return define( + BuiltinFunctionName.RIGHT.getName(), + impl(nullMissingHandling(TextFunction::exprRight), STRING, STRING, INTEGER)); } /** - * Returns the leftmost len characters from the string str, or NULL if any argument is NULL. - * Supports following signature: + * Returns the leftmost len characters from the string str, or NULL if any argument is + * NULL.
+ * Supports following signature:
* (STRING, INTEGER) -> STRING */ private DefaultFunctionResolver left() { - return define(BuiltinFunctionName.LEFT.getName(), + return define( + BuiltinFunctionName.LEFT.getName(), impl(nullMissingHandling(TextFunction::exprLeft), STRING, STRING, INTEGER)); } /** - * Returns the numeric value of the leftmost character of the string str. - * Returns 0 if str is the empty string. Returns NULL if str is NULL. - * ASCII() works for 8-bit characters. - * Supports following signature: + * Returns the numeric value of the leftmost character of the string str.
+ * Returns 0 if str is the empty string. Returns NULL if str is NULL.
+ * ASCII() works for 8-bit characters.
+ * Supports following signature:
* STRING -> INTEGER */ private DefaultFunctionResolver ascii() { - return define(BuiltinFunctionName.ASCII.getName(), + return define( + BuiltinFunctionName.ASCII.getName(), impl(nullMissingHandling(TextFunction::exprAscii), INTEGER, STRING)); } /** - * LOCATE(substr, str) returns the position of the first occurrence of substring substr - * in string str. LOCATE(substr, str, pos) returns the position of the first occurrence - * of substring substr in string str, starting at position pos. - * Returns 0 if substr is not in str. - * Returns NULL if any argument is NULL. - * Supports following signature: - * (STRING, STRING) -> INTEGER + * LOCATE(substr, str) returns the position of the first occurrence of substring substr
+ * in string str. LOCATE(substr, str, pos) returns the position of the first occurrence
+ * of substring substr in string str, starting at position pos.
+ * Returns 0 if substr is not in str.
+ * Returns NULL if any argument is NULL.
+ * Supports following signature:
+ * (STRING, STRING) -> INTEGER
* (STRING, STRING, INTEGER) -> INTEGER */ private DefaultFunctionResolver locate() { - return define(BuiltinFunctionName.LOCATE.getName(), - impl(nullMissingHandling( - (SerializableBiFunction) - TextFunction::exprLocate), INTEGER, STRING, STRING), - impl(nullMissingHandling( - (SerializableTriFunction) - TextFunction::exprLocate), INTEGER, STRING, STRING, INTEGER)); + return define( + BuiltinFunctionName.LOCATE.getName(), + impl( + nullMissingHandling( + (SerializableBiFunction) TextFunction::exprLocate), + INTEGER, + STRING, + STRING), + impl( + nullMissingHandling( + (SerializableTriFunction) + TextFunction::exprLocate), + INTEGER, + STRING, + STRING, + INTEGER)); } /** - * Returns the position of the first occurrence of a substring in a string starting from 1. - * Returns 0 if substring is not in string. - * Returns NULL if any argument is NULL. - * Supports following signature: + * Returns the position of the first occurrence of a substring in a string starting from 1. + *
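A short worked example of the LOCATE/POSITION semantics described above, with invented values (positions are 1-based and 0 means not found):

    // LOCATE('bar', 'foobarbar')     -> 4
    // LOCATE('bar', 'foobarbar', 5)  -> 7   (search starts at position 5)
    // LOCATE('xyz', 'foobarbar')     -> 0
    // POSITION('bar' IN 'foobarbar') -> 4   (two-argument form, same exprLocate underneath)
    int first = "foobarbar".indexOf("bar") + 1;            // 4 -- plain-Java equivalent of the 1-based rule
    int fromFive = "foobarbar".indexOf("bar", 5 - 1) + 1;  // 7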
+ * Returns 0 if substring is not in string.
+ * Returns NULL if any argument is NULL.
+ * Supports following signature:
* (STRING IN STRING) -> INTEGER */ private DefaultFunctionResolver position() { - return define(BuiltinFunctionName.POSITION.getName(), - impl(nullMissingHandling( - (SerializableBiFunction) - TextFunction::exprLocate), INTEGER, STRING, STRING)); + return define( + BuiltinFunctionName.POSITION.getName(), + impl( + nullMissingHandling( + (SerializableBiFunction) TextFunction::exprLocate), + INTEGER, + STRING, + STRING)); } /** - * REPLACE(str, from_str, to_str) returns the string str with all occurrences of - * the string from_str replaced by the string to_str. - * REPLACE() performs a case-sensitive match when searching for from_str. - * Supports following signature: + * REPLACE(str, from_str, to_str) returns the string str with all occurrences of
+ * the string from_str replaced by the string to_str.

+ * REPLACE() performs a case-sensitive match when searching for from_str.
+ * Supports following signature:
* (STRING, STRING, STRING) -> STRING */ private DefaultFunctionResolver replace() { - return define(BuiltinFunctionName.REPLACE.getName(), + return define( + BuiltinFunctionName.REPLACE.getName(), impl(nullMissingHandling(TextFunction::exprReplace), STRING, STRING, STRING, STRING)); } /** - * REVERSE(str) returns reversed string of the string supplied as an argument - * Returns NULL if the argument is NULL. - * Supports the following signature: + * REVERSE(str) returns reversed string of the string supplied as an argument

+ * Returns NULL if the argument is NULL.
+ * Supports the following signature:
* (STRING) -> STRING */ private DefaultFunctionResolver reverse() { - return define(BuiltinFunctionName.REVERSE.getName(), + return define( + BuiltinFunctionName.REVERSE.getName(), impl(nullMissingHandling(TextFunction::exprReverse), STRING, STRING)); } @@ -321,7 +378,7 @@ private static ExprValue exprSubstrStart(ExprValue exprValue, ExprValue start) { } private static ExprValue exprSubstrStartLength( - ExprValue exprValue, ExprValue start, ExprValue length) { + ExprValue exprValue, ExprValue start, ExprValue length) { int startIdx = start.integerValue(); int len = length.integerValue(); if ((startIdx == 0) || (len == 0)) { diff --git a/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java b/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java index 24751633de..2030ce8062 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/WindowDefinition.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import static org.opensearch.sql.ast.tree.Sort.SortOption; @@ -16,9 +15,7 @@ import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.expression.Expression; -/** - * Window definition that consists of partition and sort by information for a window. - */ +/** Window definition that consists of partition and sort by information for a window. */ @Data public class WindowDefinition { @@ -27,7 +24,8 @@ public class WindowDefinition { /** * Return all items in partition by and sort list. - * @return all sort items + * + * @return all sort items */ public List> getAllSortItems() { List> allSorts = new ArrayList<>(); @@ -35,5 +33,4 @@ public List> getAllSortItems() { allSorts.addAll(sortList); return allSorts; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java index a15919bf03..73f0734953 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctionExpression.java @@ -3,27 +3,27 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** - * Window function abstraction. - */ +/** Window function abstraction. */ public interface WindowFunctionExpression extends Expression { /** - * Create specific window frame based on window definition and what's current window function. - * For now two types of cumulative window frame is returned: - * 1. Ranking window functions: ignore frame definition and always operates on - * previous and current row. - * 2. Aggregate window functions: frame partition into peers and sliding window is not supported. + * Create specific window frame based on window definition and what's current window function. For + * now two types of cumulative window frame is returned: + * + *
    + *
  1. Ranking window functions: ignore frame definition and always operates on previous and + * current row. + *
  2. Aggregate window functions: frame partition into peers and sliding window is not + * supported. + *
* * @param definition window definition - * @return window frame + * @return window frame */ WindowFrame createWindowFrame(WindowDefinition definition); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java index 9a9e0c4c86..3df59c52c0 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/WindowFunctions.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window; import static java.util.Collections.emptyList; @@ -22,16 +21,14 @@ import org.opensearch.sql.expression.window.ranking.RankingWindowFunction; import org.opensearch.sql.expression.window.ranking.RowNumberFunction; -/** - * Window functions that register all window functions in function repository. - */ +/** Window functions that register all window functions in function repository. */ @UtilityClass public class WindowFunctions { /** * Register all window functions to function repository. * - * @param repository function repository + * @param repository function repository */ public void register(BuiltinFunctionRepository repository) { repository.register(rowNumber()); @@ -51,11 +48,11 @@ private DefaultFunctionResolver denseRank() { return rankingFunction(BuiltinFunctionName.DENSE_RANK.getName(), DenseRankFunction::new); } - private DefaultFunctionResolver rankingFunction(FunctionName functionName, - Supplier constructor) { + private DefaultFunctionResolver rankingFunction( + FunctionName functionName, Supplier constructor) { FunctionSignature functionSignature = new FunctionSignature(functionName, emptyList()); FunctionBuilder functionBuilder = (functionProperties, arguments) -> constructor.get(); - return new DefaultFunctionResolver(functionName, - ImmutableMap.of(functionSignature, functionBuilder)); + return new DefaultFunctionResolver( + functionName, ImmutableMap.of(functionSignature, functionBuilder)); } } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java index 604f65e6ff..63922ac3fd 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/aggregation/AggregateWindowFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.aggregation; import java.util.List; @@ -21,9 +20,7 @@ import org.opensearch.sql.expression.window.frame.PeerRowsWindowFrame; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** - * Aggregate function adapter that adapts Aggregator for window operator use. - */ +/** Aggregate function adapter that adapts Aggregator for window operator use. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class AggregateWindowFunction implements WindowFunctionExpression { @@ -64,5 +61,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return aggregator.toString(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java b/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java index 06b19a1488..359486a4ef 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/frame/CurrentRowWindowFrame.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import com.google.common.collect.PeekingIterator; @@ -21,18 +20,17 @@ import org.opensearch.sql.expression.window.WindowDefinition; /** - * Conceptually, cumulative window frame should hold all seen rows till next partition. - * This class is actually an optimized version that only hold previous and current row. This is - * efficient and sufficient for ranking and aggregate window function support for now, though need - * to add "real" cumulative frame implementation in future as needed. + * Conceptually, cumulative window frame should hold all seen rows till next partition. This class + * is actually an optimized version that only hold previous and current row. This is efficient and + * sufficient for ranking and aggregate window function support for now, though need to add "real" + * cumulative frame implementation in future as needed. */ @EqualsAndHashCode @RequiredArgsConstructor @ToString public class CurrentRowWindowFrame implements WindowFrame { - @Getter - private final WindowDefinition windowDefinition; + @Getter private final WindowDefinition windowDefinition; private ExprValue previous; private ExprValue current; @@ -67,14 +65,12 @@ public ExprValue previous() { private List resolve(List expressions, ExprValue row) { Environment valueEnv = row.bindingTuples(); - return expressions.stream() - .map(expr -> expr.valueOf(valueEnv)) - .collect(Collectors.toList()); + return expressions.stream().map(expr -> expr.valueOf(valueEnv)).collect(Collectors.toList()); } /** - * Current row window frame won't pre-fetch any row ahead. - * So always return false as nothing "cached" in frame. + * Current row window frame won't pre-fetch any row ahead. So always return false as nothing + * "cached" in frame. */ @Override public boolean hasNext() { @@ -85,5 +81,4 @@ public boolean hasNext() { public List next() { return Collections.emptyList(); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java b/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java index a3e8de40c1..a98826d333 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/frame/PeerRowsWindowFrame.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import com.google.common.collect.PeekingIterator; @@ -19,9 +18,9 @@ import org.opensearch.sql.expression.window.WindowDefinition; /** - * Window frame that only keep peers (tuples with same value of fields specified in sort list - * in window definition). 
See PeerWindowFrameTest for details about how this window frame - * interacts with window operator and window function. + * Window frame that only keep peers (tuples with same value of fields specified in sort list in + * window definition). See PeerWindowFrameTest for details about how this window frame interacts + * with window operator and window function. */ @RequiredArgsConstructor public class PeerRowsWindowFrame implements WindowFrame { @@ -29,34 +28,27 @@ public class PeerRowsWindowFrame implements WindowFrame { private final WindowDefinition windowDefinition; /** - * All peer rows (peer means rows in a partition that share same sort key - * based on sort list in window definition. + * All peer rows (peer means rows in a partition that share same sort key based on sort list in + * window definition. */ private final List peers = new ArrayList<>(); - /** - * Which row in the peer is currently being enriched by window function. - */ + /** Which row in the peer is currently being enriched by window function. */ private int position; - /** - * Does row at current position represents a new partition. - */ + /** Does row at current position represents a new partition. */ private boolean isNewPartition = true; - /** - * If any more pre-fetched rows not returned to window operator yet. - */ + /** If any more pre-fetched rows not returned to window operator yet. */ @Override public boolean hasNext() { return position < peers.size(); } /** - * Move position and clear new partition flag. - * Note that because all peer rows have same result from window function, - * this is only returned at first time to change window function state. - * Afterwards, empty list is returned to avoid changes until next peer loaded. + * Move position and clear new partition flag. Note that because all peer rows have same result + * from window function, this is only returned at first time to change window function state. + * Afterward, empty list is returned to avoid changes until next peer loaded. * * @return all rows for the peer */ @@ -70,8 +62,9 @@ public List next() { } /** - * Current row at the position. Because rows are pre-fetched here, - * window operator needs to get them from here too. + * Current row at the position. Because rows are pre-fetched here, window operator needs to get + * them from here too. + * * @return row at current position that being enriched by window function */ @Override @@ -82,11 +75,16 @@ public ExprValue current() { /** * Preload all peer rows if last peer rows done. Note that when no more data in peeking iterator, * there must be rows in frame (hasNext()=true), so no need to check it.hasNext() in this method. - * Load until: - * 1. Different peer found (row with different sort key) - * 2. Or new partition (row with different partition key) - * 3. Or no more rows - * @param it rows iterator + *
+ * Load until:
+ * + *
    + *
  1. Different peer found (row with different sort key) + *
  2. Or new partition (row with different partition key) + *
  3. Or no more rows + *
+ * + * @param it rows iterator */ @Override public void load(PeekingIterator it) { @@ -118,10 +116,7 @@ public boolean isNewPartition() { private boolean isPeer(ExprValue next) { List sortFields = - windowDefinition.getSortList() - .stream() - .map(Pair::getRight) - .collect(Collectors.toList()); + windowDefinition.getSortList().stream().map(Pair::getRight).collect(Collectors.toList()); ExprValue last = peers.get(peers.size() - 1); return resolve(sortFields, last).equals(resolve(sortFields, next)); @@ -139,9 +134,6 @@ private boolean isSamePartition(ExprValue next) { private List resolve(List expressions, ExprValue row) { Environment valueEnv = row.bindingTuples(); - return expressions.stream() - .map(expr -> expr.valueOf(valueEnv)) - .collect(Collectors.toList()); + return expressions.stream().map(expr -> expr.valueOf(valueEnv)).collect(Collectors.toList()); } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java b/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java index 323656547f..657f63e4c9 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/frame/WindowFrame.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.frame; import com.google.common.collect.PeekingIterator; @@ -14,13 +13,17 @@ import org.opensearch.sql.expression.env.Environment; /** - * Window frame that represents a subset of a window which is all data accessible to - * the window function when calculation. Basically there are 3 types of window frame: - * 1) Entire window frame that holds all data of the window - * 2) Cumulative window frame that accumulates one row by another - * 3) Sliding window frame that maintains a sliding window of fixed size - * Note that which type of window frame is used is determined by both window function itself - * and frame definition in a window definition. + * Window frame that represents a subset of a window which is all data accessible to the window + * function when calculation. Basically there are 3 types of window frame: + * + *
    + *
  1. Entire window frame that holds all data of the window + *
  2. Cumulative window frame that accumulates one row by another + *
  3. Sliding window frame that maintains a sliding window of fixed size + *
+ * + * Note that which type of window frame is used is determined by both window function itself and + * frame definition in a window definition. */ public interface WindowFrame extends Environment, Iterator> { @@ -31,20 +34,22 @@ default ExprValue resolve(Expression var) { /** * Check is current row the beginning of a new partition according to window definition. - * @return true if a new partition begins here, otherwise false. + * + * @return true if a new partition begins here, otherwise false. */ boolean isNewPartition(); /** * Load one or more rows as window function calculation needed. - * @param iterator peeking iterator that can peek next element without moving iterator + * + * @param iterator peeking iterator that can peek next element without moving iterator */ void load(PeekingIterator iterator); /** * Get current data row for giving window operator chance to get rows preloaded into frame. + * * @return data row */ ExprValue current(); - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java index ba6e88d98d..87506ef63e 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/DenseRankFunction.java @@ -3,15 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.window.frame.CurrentRowWindowFrame; /** - * Dense rank window function that assigns a rank number to each row similarly as - * rank function. The difference is there is no gap between rank number assigned. + * Dense rank window function that assigns a rank number to each row similarly as rank function. The + * difference is there is no gap between rank number assigned. */ public class DenseRankFunction extends RankingWindowFunction { @@ -30,5 +29,4 @@ protected int rank(CurrentRowWindowFrame frame) { } return rank; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java index c1f33e6137..f72a28cd9a 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankFunction.java @@ -3,22 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.window.frame.CurrentRowWindowFrame; /** - * Rank window function that assigns a rank number to each row based on sort items - * defined in window definition. Use same rank number if sort item values same on - * previous and current row. + * Rank window function that assigns a rank number to each row based on sort items defined in window + * definition. Use same rank number if sort item values same on previous and current row. */ public class RankFunction extends RankingWindowFunction { - /** - * Total number of rows have seen in current partition. - */ + /** Total number of rows have seen in current partition. 
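To make the difference between the ranking functions in this and the following hunks concrete, here is a worked example over one invented partition sorted by salary:

    // salary (sorted):  3000, 3000, 4000, 5000, 5000, 6000
    // ROW_NUMBER():        1,    2,    3,    4,    5,    6   -- always increments
    // RANK():              1,    1,    3,    4,    4,    6   -- ties share a rank, gaps follow ties
    // DENSE_RANK():        1,    1,    2,    3,    3,    4   -- ties share a rank, no gaps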
*/ private int total; public RankFunction() { @@ -38,5 +34,4 @@ protected int rank(CurrentRowWindowFrame frame) { } return rank; } - } diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java index 07a4b42dbd..c119629cda 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RankingWindowFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import static java.util.Collections.emptyList; @@ -30,11 +29,9 @@ * such as same return type (integer), same argument list (no arg). */ public abstract class RankingWindowFunction extends FunctionExpression - implements WindowFunctionExpression { + implements WindowFunctionExpression { - /** - * Current rank number assigned. - */ + /** Current rank number assigned. */ protected int rank; public RankingWindowFunction(FunctionName functionName) { @@ -58,26 +55,27 @@ public ExprValue valueOf(Environment valueEnv) { /** * Rank logic that sub-class needs to implement. - * @param frame window frame - * @return rank number + * + * @param frame window frame + * @return rank number */ protected abstract int rank(CurrentRowWindowFrame frame); /** * Check sort field to see if current value is different from previous. - * @param frame window frame - * @return true if different, false if same or no sort list defined + * + * @param frame window frame + * @return true if different, false if same or no sort list defined */ protected boolean isSortFieldValueDifferent(CurrentRowWindowFrame frame) { if (isSortItemsNotDefined(frame)) { return false; } - List sortItems = frame.getWindowDefinition() - .getSortList() - .stream() - .map(Pair::getRight) - .collect(Collectors.toList()); + List sortItems = + frame.getWindowDefinition().getSortList().stream() + .map(Pair::getRight) + .collect(Collectors.toList()); List previous = resolve(frame, sortItems, frame.previous()); List current = resolve(frame, sortItems, frame.current()); @@ -90,9 +88,7 @@ private boolean isSortItemsNotDefined(CurrentRowWindowFrame frame) { private List resolve(WindowFrame frame, List expressions, ExprValue row) { BindingTuple valueEnv = row.bindingTuples(); - return expressions.stream() - .map(expr -> expr.valueOf(valueEnv)) - .collect(Collectors.toList()); + return expressions.stream().map(expr -> expr.valueOf(valueEnv)).collect(Collectors.toList()); } @Override diff --git a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java index 067dfa569d..90bb2ed8ff 100644 --- a/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java +++ b/core/src/main/java/org/opensearch/sql/expression/window/ranking/RowNumberFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.expression.window.ranking; import org.opensearch.sql.expression.function.BuiltinFunctionName; @@ -25,5 +24,4 @@ protected int rank(CurrentRowWindowFrame frame) { } return rank++; } - } diff --git a/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java b/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java index 94bb8d6936..84cec4c9c7 100644 --- 
a/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java +++ b/core/src/main/java/org/opensearch/sql/monitor/AlwaysHealthyMonitor.java @@ -3,19 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.monitor; -/** - * Always healthy resource monitor. - */ +/** Always healthy resource monitor. */ public class AlwaysHealthyMonitor extends ResourceMonitor { - public static final ResourceMonitor ALWAYS_HEALTHY_MONITOR = - new AlwaysHealthyMonitor(); + public static final ResourceMonitor ALWAYS_HEALTHY_MONITOR = new AlwaysHealthyMonitor(); - /** - * always healthy. - */ + /** always healthy. */ @Override public boolean isHealthy() { return true; diff --git a/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java b/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java index ce76a3f982..bbd1c67a62 100644 --- a/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java +++ b/core/src/main/java/org/opensearch/sql/monitor/ResourceMonitor.java @@ -3,12 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.monitor; /** - * The abstract interface of ResourceMonitor. - * When an fault is detected, the circuit breaker is open. + * The abstract interface of ResourceMonitor. When an fault is detected, the circuit breaker is + * open. */ public abstract class ResourceMonitor { /** diff --git a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java index 699d0ec76a..b53d17b38f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java +++ b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java @@ -45,13 +45,12 @@ /** * Default implementor for implementing logical to physical translation. "Default" here means all - * logical operator will be translated to correspondent physical operator to pipeline operations - * in post-processing style in memory. - * Different storage can override methods here to optimize default pipelining operator, for example - * a storage has the flexibility to override visitFilter and visitRelation to push down filtering - * operation and return a single physical index scan operator. + * logical operator will be translated to correspondent physical operator to pipeline operations in + * post-processing style in memory. Different storage can override methods here to optimize default + * pipelining operator, for example a storage has the flexibility to override visitFilter and + * visitRelation to push down filtering operation and return a single physical index scan operator. 
* - * @param context type + * @param context type */ public class DefaultImplementor extends LogicalPlanNodeVisitor { @@ -62,8 +61,7 @@ public PhysicalPlan visitRareTopN(LogicalRareTopN node, C context) { node.getCommandType(), node.getNoOfResults(), node.getFieldList(), - node.getGroupByList() - ); + node.getGroupByList()); } @Override @@ -78,16 +76,14 @@ public PhysicalPlan visitDedupe(LogicalDedupe node, C context) { @Override public PhysicalPlan visitProject(LogicalProject node, C context) { - return new ProjectOperator(visitChild(node, context), node.getProjectList(), - node.getNamedParseExpressions()); + return new ProjectOperator( + visitChild(node, context), node.getProjectList(), node.getNamedParseExpressions()); } @Override public PhysicalPlan visitWindow(LogicalWindow node, C context) { return new WindowOperator( - visitChild(node, context), - node.getWindowFunction(), - node.getWindowDefinition()); + visitChild(node, context), node.getWindowFunction(), node.getWindowDefinition()); } @Override @@ -148,8 +144,9 @@ public PhysicalPlan visitTableWriteBuilder(TableWriteBuilder plan, C context) { @Override public PhysicalPlan visitRelation(LogicalRelation node, C context) { - throw new UnsupportedOperationException("Storage engine is responsible for " - + "implementing and optimizing logical plan with relation involved"); + throw new UnsupportedOperationException( + "Storage engine is responsible for " + + "implementing and optimizing logical plan with relation involved"); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/PlanContext.java b/core/src/main/java/org/opensearch/sql/planner/PlanContext.java index 3d43c02d61..38f2bde244 100644 --- a/core/src/main/java/org/opensearch/sql/planner/PlanContext.java +++ b/core/src/main/java/org/opensearch/sql/planner/PlanContext.java @@ -9,13 +9,10 @@ import lombok.Getter; import org.opensearch.sql.storage.split.Split; -/** - * Plan context hold planning related information. - */ +/** Plan context hold planning related information. */ public class PlanContext { - @Getter - private final Optional split; + @Getter private final Optional split; public PlanContext(Split split) { this.split = Optional.of(split); diff --git a/core/src/main/java/org/opensearch/sql/planner/PlanNode.java b/core/src/main/java/org/opensearch/sql/planner/PlanNode.java index 8cd6e088e5..a79997cd7f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/PlanNode.java +++ b/core/src/main/java/org/opensearch/sql/planner/PlanNode.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner; import java.util.List; -/** - * The definition of Plan Node. - */ +/** The definition of Plan Node. */ public interface PlanNode { /** diff --git a/core/src/main/java/org/opensearch/sql/planner/Planner.java b/core/src/main/java/org/opensearch/sql/planner/Planner.java index 8333425091..1397fa8a18 100644 --- a/core/src/main/java/org/opensearch/sql/planner/Planner.java +++ b/core/src/main/java/org/opensearch/sql/planner/Planner.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner; - import java.util.List; import lombok.RequiredArgsConstructor; import org.opensearch.sql.planner.logical.LogicalPlan; @@ -16,17 +14,15 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; -/** - * Planner that plans and chooses the optimal physical plan. - */ +/** Planner that plans and chooses the optimal physical plan. 
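As the DefaultImplementor comment above notes, relation handling is left to the storage engine. A minimal sketch of what such an override could look like; MyStorageImplementor and MyIndexScan are invented placeholder names, not classes from this diff:

    import org.opensearch.sql.planner.DefaultImplementor;
    import org.opensearch.sql.planner.logical.LogicalRelation;
    import org.opensearch.sql.planner.physical.PhysicalPlan;

    class MyStorageImplementor extends DefaultImplementor<Object> {
      @Override
      public PhysicalPlan visitRelation(LogicalRelation node, Object context) {
        // Instead of the default UnsupportedOperationException, return a storage-specific scan
        // operator; MyIndexScan stands in for whatever the engine actually provides.
        return new MyIndexScan(node);
      }
    }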
*/ @RequiredArgsConstructor public class Planner { private final LogicalPlanOptimizer logicalOptimizer; /** - * Generate optimal physical plan for logical plan. If no table involved, - * translate logical plan to physical by default implementor. + * Generate optimal physical plan for logical plan. If no table involved, translate logical plan + * to physical by default implementor.
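A hedged usage sketch for the plan() method that follows; how the optimizer and the analyzed logical plan are obtained is deliberately left out:

    import org.opensearch.sql.planner.Planner;
    import org.opensearch.sql.planner.logical.LogicalPlan;
    import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer;
    import org.opensearch.sql.planner.physical.PhysicalPlan;

    PhysicalPlan toPhysical(LogicalPlanOptimizer optimizer, LogicalPlan logical) {
      Planner planner = new Planner(optimizer);  // the optimizer is injected via @RequiredArgsConstructor
      return planner.plan(logical);              // implemented by the table when a relation is present,
                                                 // otherwise translated by DefaultImplementor
    }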
* TODO: for now just delegate entire logical plan to storage engine. * * @param plan logical plan @@ -37,28 +33,28 @@ public PhysicalPlan plan(LogicalPlan plan) { if (table == null) { return plan.accept(new DefaultImplementor<>(), null); } - return table.implement( - table.optimize(optimize(plan))); + return table.implement(table.optimize(optimize(plan))); } private Table findTable(LogicalPlan plan) { - return plan.accept(new LogicalPlanNodeVisitor() { - - @Override - public Table visitNode(LogicalPlan node, Object context) { - List children = node.getChild(); - if (children.isEmpty()) { - return null; - } - return children.get(0).accept(this, context); - } - - @Override - public Table visitRelation(LogicalRelation node, Object context) { - return node.getTable(); - } - - }, null); + return plan.accept( + new LogicalPlanNodeVisitor() { + + @Override + public Table visitNode(LogicalPlan node, Object context) { + List children = node.getChild(); + if (children.isEmpty()) { + return null; + } + return children.get(0).accept(this, context); + } + + @Override + public Table visitRelation(LogicalRelation node, Object context) { + return node.getTable(); + } + }, + null); } private LogicalPlan optimize(LogicalPlan plan) { diff --git a/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java b/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java index ab195da5bf..1503946abc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/SerializablePlan.java @@ -10,36 +10,37 @@ /** * All subtypes of PhysicalPlan which needs to be serialized (in cursor, for pagination feature) * should follow one of the following options. + * *
    *
  • Both: - *
      - *
    • Override both methods from {@link Externalizable}.
    • - *
    • Define a public no-arg constructor.
    • - *
    - *
  • - *
  • - * Overwrite {@link #getPlanForSerialization} to return - * another instance of {@link SerializablePlan}. - *
  • + *
      + *
    • Override both methods from {@link Externalizable}. + *
    • Define a public no-arg constructor. + *
    + *
  • Overwrite {@link #getPlanForSerialization} to return another instance of {@link + * SerializablePlan}. *
*/ public interface SerializablePlan extends Externalizable { /** - * Override to return child or delegated plan, so parent plan should skip this one - * for serialization, but it should try to serialize grandchild plan. - * Imagine plan structure like this + * Override to return child or delegated plan, so parent plan should skip this one for + * serialization, but it should try to serialize grandchild plan. Imagine plan structure like this + * *
    *    A         -> this
    *    `- B      -> child
    *      `- C    -> this
    * 
- * In that case only plans A and C should be attempted to serialize. - * It is needed to skip a `ResourceMonitorPlan` instance only, actually. * - *
{@code
-   *    * A.writeObject(B.getPlanForSerialization());
-   *  }
+ * In that case only plans A and C should be attempted to serialize. It is needed to skip a + * `ResourceMonitorPlan` instance only, actually. + * + *
{@code
+   * * A.writeObject(B.getPlanForSerialization());
+   *
+   * }
+ * * @return Next plan for serialization. */ default SerializablePlan getPlanForSerialization() { diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java index c8c04b1817..25dbd14f1a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAD.java @@ -18,6 +18,7 @@ public class LogicalAD extends LogicalPlan { /** * Constructor of LogicalAD. + * * @param child child logical plan * @param arguments arguments of the algorithm */ diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java index ebca01cdf8..ecbcece623 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalAggregation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -14,26 +13,18 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; -/** - * Logical Aggregation. - */ +/** Logical Aggregation. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalAggregation extends LogicalPlan { - @Getter - private final List aggregatorList; + @Getter private final List aggregatorList; - @Getter - private final List groupByList; + @Getter private final List groupByList; - /** - * Constructor of LogicalAggregation. - */ + /** Constructor of LogicalAggregation. */ public LogicalAggregation( - LogicalPlan child, - List aggregatorList, - List groupByList) { + LogicalPlan child, List aggregatorList, List groupByList) { super(Collections.singletonList(child)); this.aggregatorList = aggregatorList; this.groupByList = groupByList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java index e5c30a4f4f..d1b98df8ed 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalCloseCursor.java @@ -10,8 +10,8 @@ import lombok.ToString; /** - * A logical plan node which wraps {@link org.opensearch.sql.planner.LogicalCursor} - * and represent a cursor close operation. + * A logical plan node which wraps {@link org.opensearch.sql.planner.LogicalCursor} and represent a + * cursor close operation. */ @ToString @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java index 020352287d..92734440f7 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalDedupe.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Arrays; @@ -13,9 +12,7 @@ import lombok.ToString; import org.opensearch.sql.expression.Expression; -/** - * Logical Dedupe Plan. - */ +/** Logical Dedupe Plan. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -26,12 +23,12 @@ public class LogicalDedupe extends LogicalPlan { private final Boolean keepEmpty; private final Boolean consecutive; - /** - * Constructor of LogicalDedupe. - */ + /** Constructor of LogicalDedupe. */ public LogicalDedupe( LogicalPlan child, - List dedupeList, Integer allowedDuplication, Boolean keepEmpty, + List dedupeList, + Integer allowedDuplication, + Boolean keepEmpty, Boolean consecutive) { super(Arrays.asList(child)); this.dedupeList = dedupeList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java index 8ec0b84dad..e7b8f353bc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalEval.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -24,15 +23,10 @@ @EqualsAndHashCode(callSuper = true) public class LogicalEval extends LogicalPlan { - @Getter - private final List> expressions; + @Getter private final List> expressions; - /** - * Constructor of LogicalEval. - */ - public LogicalEval( - LogicalPlan child, - List> expressions) { + /** Constructor of LogicalEval. */ + public LogicalEval(LogicalPlan child, List> expressions) { super(Collections.singletonList(child)); this.expressions = expressions; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java index e4a0482aac..ca16b41597 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFetchCursor.java @@ -9,25 +9,17 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; -import org.opensearch.sql.planner.logical.LogicalPlan; -import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; import org.opensearch.sql.storage.StorageEngine; -/** - * A plan node which represents operation of fetching a next page from the cursor. - */ +/** A plan node which represents operation of fetching a next page from the cursor. */ @EqualsAndHashCode(callSuper = false) @ToString public class LogicalFetchCursor extends LogicalPlan { - @Getter - private final String cursor; + @Getter private final String cursor; - @Getter - private final StorageEngine engine; + @Getter private final StorageEngine engine; - /** - * LogicalCursor constructor. Does not have child plans. - */ + /** LogicalCursor constructor. Does not have child plans. */ public LogicalFetchCursor(String cursor, StorageEngine engine) { super(List.of()); this.cursor = cursor; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java index 78887ad448..49280e8709 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalFilter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -12,19 +11,14 @@ import lombok.ToString; import org.opensearch.sql.expression.Expression; -/** - * Logical Filter represent the filter relation. - */ +/** Logical Filter represent the filter relation. 
*/ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalFilter extends LogicalPlan { - @Getter - private final Expression condition; + @Getter private final Expression condition; - /** - * Constructor of LogicalFilter. - */ + /** Constructor of LogicalFilter. */ public LogicalFilter(LogicalPlan child, Expression condition) { super(Collections.singletonList(child)); this.condition = condition; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java index c1e873a00d..41fcd48f81 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalHighlight.java @@ -20,11 +20,9 @@ public class LogicalHighlight extends LogicalPlan { private final Expression highlightField; private final Map arguments; - /** - * Constructor of LogicalHighlight. - */ - public LogicalHighlight(LogicalPlan childPlan, Expression highlightField, - Map arguments) { + /** Constructor of LogicalHighlight. */ + public LogicalHighlight( + LogicalPlan childPlan, Expression highlightField, Map arguments) { super(Collections.singletonList(childPlan)); this.highlightField = highlightField; this.arguments = arguments; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java index e6253cb2cc..bec77d9b6f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalLimit.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -18,9 +17,7 @@ public class LogicalLimit extends LogicalPlan { private final Integer limit; private final Integer offset; - /** - * Constructor of LogicalLimit. - */ + /** Constructor of LogicalLimit. */ public LogicalLimit(LogicalPlan input, Integer limit, Integer offset) { super(Collections.singletonList(input)); this.limit = limit; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java index c54ee92e08..780e0bba94 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalML.java @@ -7,17 +7,16 @@ import lombok.ToString; import org.opensearch.sql.ast.expression.Literal; -/** - * ML logical plan. - */ +/** ML logical plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) public class LogicalML extends LogicalPlan { - private final Map arguments; + private final Map arguments; /** * Constructor of LogicalML. + * * @param child child logical plan * @param arguments arguments of the algorithm */ diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java index 22771b42de..cfc313a68d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalMLCommons.java @@ -7,25 +7,23 @@ import lombok.ToString; import org.opensearch.sql.ast.expression.Literal; -/** - * ml-commons logical plan. - */ +/** ml-commons logical plan. 
*/ @Getter @ToString @EqualsAndHashCode(callSuper = true) public class LogicalMLCommons extends LogicalPlan { private final String algorithm; - private final Map arguments; + private final Map arguments; /** * Constructor of LogicalMLCommons. + * * @param child child logical plan * @param algorithm algorithm name * @param arguments arguments of the algorithm */ - public LogicalMLCommons(LogicalPlan child, String algorithm, - Map arguments) { + public LogicalMLCommons(LogicalPlan child, String algorithm, Map arguments) { super(Collections.singletonList(child)); this.algorithm = algorithm; this.arguments = arguments; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java index 3e0e167cf3..e791a1fad1 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalNested.java @@ -14,9 +14,7 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Logical Nested plan. - */ +/** Logical Nested plan. */ @EqualsAndHashCode(callSuper = true) @Getter @ToString @@ -24,15 +22,11 @@ public class LogicalNested extends LogicalPlan { private List> fields; private final List projectList; - /** - * Constructor of LogicalNested. - * - */ + /** Constructor of LogicalNested. */ public LogicalNested( LogicalPlan childPlan, List> fields, - List projectList - ) { + List projectList) { super(Collections.singletonList(childPlan)); this.fields = fields; this.projectList = projectList; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java index 372f9dcf0b..bd9f20e055 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java @@ -10,14 +10,11 @@ import lombok.Getter; import lombok.ToString; -/** - * LogicalPaginate represents pagination operation for underlying plan. - */ +/** LogicalPaginate represents pagination operation for underlying plan. */ @ToString @EqualsAndHashCode(callSuper = false) public class LogicalPaginate extends LogicalPlan { - @Getter - private final int pageSize; + @Getter private final int pageSize; public LogicalPaginate(int pageSize, List childPlans) { super(childPlans); diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java index ad4a0b3794..2bc1a8756f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlan.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.List; import lombok.EqualsAndHashCode; import org.opensearch.sql.planner.PlanNode; -/** - * The abstract base class for all the Logical Plan node. - */ +/** The abstract base class for all the Logical Plan node. */ @EqualsAndHashCode(callSuper = false) public abstract class LogicalPlan implements PlanNode { @@ -27,8 +24,8 @@ public LogicalPlan(List childPlans) { * * @param visitor visitor. * @param context visitor context. - * @param returned object type. - * @param context type. + * @param returned object type. + * @param context type. * @return returned object. 
*/ public abstract R accept(LogicalPlanNodeVisitor visitor, C context); @@ -38,7 +35,6 @@ public LogicalPlan replaceChildPlans(List childPlans) { return this; } - @Override public List getChild() { return childPlans; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java index c0e253ca50..2a886ba0ca 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -25,9 +24,7 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Logical Plan DSL. - */ +/** Logical Plan DSL. */ @UtilityClass public class LogicalPlanDSL { @@ -57,7 +54,7 @@ public static LogicalPlan rename( return new LogicalRename(input, renameMap); } - public static LogicalPlan paginate(LogicalPlan input, int fetchSize) { + public static LogicalPlan paginate(LogicalPlan input, int fetchSize) { return new LogicalPaginate(fetchSize, List.of(input)); } @@ -65,23 +62,23 @@ public static LogicalPlan project(LogicalPlan input, NamedExpression... fields) return new LogicalProject(input, Arrays.asList(fields), ImmutableList.of()); } - public static LogicalPlan project(LogicalPlan input, List fields, - List namedParseExpressions) { + public static LogicalPlan project( + LogicalPlan input, + List fields, + List namedParseExpressions) { return new LogicalProject(input, fields, namedParseExpressions); } - public LogicalPlan window(LogicalPlan input, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + public LogicalPlan window( + LogicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { return new LogicalWindow(input, windowFunction, windowDefinition); } - public LogicalPlan highlight(LogicalPlan input, Expression field, - Map arguments) { + public LogicalPlan highlight( + LogicalPlan input, Expression field, Map arguments) { return new LogicalHighlight(input, field, arguments); } - public static LogicalPlan nested( LogicalPlan input, List> nestedArgs, @@ -116,13 +113,20 @@ public static LogicalPlan dedupe( input, Arrays.asList(fields), allowedDuplication, keepEmpty, consecutive); } - public static LogicalPlan rareTopN(LogicalPlan input, CommandType commandType, - List groupByList, Expression... fields) { + public static LogicalPlan rareTopN( + LogicalPlan input, + CommandType commandType, + List groupByList, + Expression... fields) { return rareTopN(input, commandType, 10, groupByList, fields); } - public static LogicalPlan rareTopN(LogicalPlan input, CommandType commandType, int noOfResults, - List groupByList, Expression... fields) { + public static LogicalPlan rareTopN( + LogicalPlan input, + CommandType commandType, + int noOfResults, + List groupByList, + Expression... fields) { return new LogicalRareTopN(input, commandType, noOfResults, Arrays.asList(fields), groupByList); } @@ -134,5 +138,4 @@ public LogicalPlan values(List... 
values) { public static LogicalPlan limit(LogicalPlan input, Integer limit, Integer offset) { return new LogicalLimit(input, limit, offset); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java index dbe21d38e0..156db35306 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import org.opensearch.sql.storage.read.TableScanBuilder; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java index 427ccffc62..5978620480 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalProject.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,21 +12,15 @@ import lombok.ToString; import org.opensearch.sql.expression.NamedExpression; -/** - * Project field specified by the {@link LogicalProject#projectList}. - */ +/** Project field specified by the {@link LogicalProject#projectList}. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalProject extends LogicalPlan { - @Getter - private final List projectList; - @Getter - private final List namedParseExpressions; + @Getter private final List projectList; + @Getter private final List namedParseExpressions; - /** - * Constructor of LogicalProject. - */ + /** Constructor of LogicalProject. */ public LogicalProject( LogicalPlan child, List projectList, diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java index 4744bc590f..2c387eca9c 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRareTopN.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -14,9 +13,7 @@ import org.opensearch.sql.ast.tree.RareTopN.CommandType; import org.opensearch.sql.expression.Expression; -/** - * Logical Rare and TopN Plan. - */ +/** Logical Rare and TopN Plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -27,12 +24,11 @@ public class LogicalRareTopN extends LogicalPlan { private final List fieldList; private final List groupByList; - /** - * Constructor of LogicalRareTopN. - */ + /** Constructor of LogicalRareTopN. 
*/ public LogicalRareTopN( LogicalPlan child, - CommandType commandType, Integer noOfResults, + CommandType commandType, + Integer noOfResults, List fieldList, List groupByList) { super(Collections.singletonList(child)); diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java index a49c3d5cbe..d50e286e1d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -12,22 +11,16 @@ import lombok.ToString; import org.opensearch.sql.storage.Table; -/** - * Logical Relation represent the data source. - */ +/** Logical Relation represent the data source. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRelation extends LogicalPlan { - @Getter - private final String relationName; + @Getter private final String relationName; - @Getter - private final Table table; + @Getter private final Table table; - /** - * Constructor of LogicalRelation. - */ + /** Constructor of LogicalRelation. */ public LogicalRelation(String relationName, Table table) { super(ImmutableList.of()); this.relationName = relationName; diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java index cda7282c40..c1aeda22c7 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRemove.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,22 +12,15 @@ import lombok.ToString; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Remove field specified by the {@link LogicalRemove#removeList}. - */ +/** Remove field specified by the {@link LogicalRemove#removeList}. */ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRemove extends LogicalPlan { - @Getter - private final Set removeList; + @Getter private final Set removeList; - /** - * Constructor of LogicalRemove. - */ - public LogicalRemove( - LogicalPlan child, - Set removeList) { + /** Constructor of LogicalRemove. */ + public LogicalRemove(LogicalPlan child, Set removeList) { super(Collections.singletonList(child)); this.removeList = removeList; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java index 007a0a6fca..25ee645932 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRename.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -13,23 +12,15 @@ import lombok.ToString; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Rename Operator. - * renameList is list of mapping of source and target. - */ +/** Rename Operator. renameList is list of mapping of source and target. 
*/ @ToString @EqualsAndHashCode(callSuper = true) public class LogicalRename extends LogicalPlan { - @Getter - private final Map renameMap; + @Getter private final Map renameMap; - /** - * Constructor of LogicalRename. - */ - public LogicalRename( - LogicalPlan child, - Map renameMap) { + /** Constructor of LogicalRename. */ + public LogicalRename(LogicalPlan child, Map renameMap) { super(Collections.singletonList(child)); this.renameMap = renameMap; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java index 947411518f..569ca7e309 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalSort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -15,9 +14,7 @@ import org.opensearch.sql.ast.tree.Sort.SortOption; import org.opensearch.sql.expression.Expression; -/** - * Sort Plan. - */ +/** Sort Plan. */ @Getter @ToString @EqualsAndHashCode(callSuper = true) @@ -25,12 +22,8 @@ public class LogicalSort extends LogicalPlan { private final List> sortList; - /** - * Constructor of LogicalSort. - */ - public LogicalSort( - LogicalPlan child, - List> sortList) { + /** Constructor of LogicalSort. */ + public LogicalSort(LogicalPlan child, List> sortList) { super(Collections.singletonList(child)); this.sortList = sortList; } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java index 29d2db54b2..325650db33 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalValues.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import com.google.common.collect.ImmutableList; @@ -14,19 +13,22 @@ import org.opensearch.sql.expression.LiteralExpression; /** - * Logical operator which is a sequence of literal rows (like a relation). - * Basically, Values operator is used to create rows of constant literals - * "out of nothing" which is corresponding with VALUES clause in SQL. - * Mostly all rows must have the same number of literals and each column should - * have same type or can be converted implicitly. - * In particular, typical use cases include: - * 1. Project without relation involved. - * 2. Defining query or insertion without a relation. + * Logical operator which is a sequence of literal rows (like a relation).
+ * Basically, Values operator is used to create rows of constant literals
+ * "out of nothing" which is corresponding with VALUES clause in SQL.
+ * Mostly all rows must have the same number of literals and each column should have same type or
+ * can be converted implicitly. In particular, typical use cases include:
+ *
+ *
+ *   1. Project without relation involved.
+ *   2. Defining query or insertion without a relation.
+ *
+ *
 * Take the following logical plan for example:
- *
- *  LogicalProject(expr=[log(2),true,1+2])
- *   |_ LogicalValues([[]])  #an empty row so that Project can evaluate its expressions in next()
- *
+ *
+ *
+ * LogicalProject(expr=[log(2),true,1+2])
+ *   |_ LogicalValues([[]]) #an empty row so that Project can evaluate its expressions in + * next() */ @ToString @Getter @@ -35,11 +37,8 @@ public class LogicalValues extends LogicalPlan { private final List> values; - /** - * Constructor of LogicalValues. - */ - public LogicalValues( - List> values) { + /** Constructor of LogicalValues. */ + public LogicalValues(List> values) { super(ImmutableList.of()); this.values = values; } @@ -48,5 +47,4 @@ public LogicalValues( public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitValues(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java index 022b284674..00c89410a7 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWindow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.logical; import java.util.Collections; @@ -25,13 +24,9 @@ public class LogicalWindow extends LogicalPlan { private final NamedExpression windowFunction; private final WindowDefinition windowDefinition; - /** - * Constructor of logical window. - */ + /** Constructor of logical window. */ public LogicalWindow( - LogicalPlan child, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + LogicalPlan child, NamedExpression windowFunction, WindowDefinition windowDefinition) { super(Collections.singletonList(child)); this.windowFunction = windowFunction; this.windowDefinition = windowDefinition; @@ -41,5 +36,4 @@ public LogicalWindow( public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitWindow(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java index 496e6009e3..a253739a68 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalWrite.java @@ -12,9 +12,7 @@ import lombok.ToString; import org.opensearch.sql.storage.Table; -/** - * Logical operator for insert statement. - */ +/** Logical operator for insert statement. */ @EqualsAndHashCode(callSuper = true) @Getter @ToString @@ -26,9 +24,7 @@ public class LogicalWrite extends LogicalPlan { /** Optional column name list specified in insert statement. */ private final List columns; - /** - * Construct a logical write with given child node, table and column name list. - */ + /** Construct a logical write with given child node, table and column name list. 
*/ public LogicalWrite(LogicalPlan child, Table table, List columns) { super(Collections.singletonList(child)); this.table = table; diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java index be1227c1da..5c115f0db8 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer; import static com.facebook.presto.matching.DefaultMatcher.DEFAULT_MATCHER; @@ -20,56 +19,53 @@ import org.opensearch.sql.planner.optimizer.rule.write.CreateTableWriteBuilder; /** - * {@link LogicalPlan} Optimizer. - * The Optimizer will run in the TopDown manner. - * 1> Optimize the current node with all the rules. - * 2> Optimize the all the child nodes with all the rules. - * 3) In case the child node could change, Optimize the current node again. + * {@link LogicalPlan} Optimizer.
+ * The Optimizer will run in the TopDown manner.
+ *
+ *
+ *   1. Optimize the current node with all the rules.
+ *   2. Optimize the all the child nodes with all the rules.
+ *   3. In case the child node could change, Optimize the current node again.
+ *
*/ public class LogicalPlanOptimizer { private final List> rules; - /** - * Create {@link LogicalPlanOptimizer} with customized rules. - */ + /** Create {@link LogicalPlanOptimizer} with customized rules. */ public LogicalPlanOptimizer(List> rules) { this.rules = rules; } - /** - * Create {@link LogicalPlanOptimizer} with pre-defined rules. - */ + /** Create {@link LogicalPlanOptimizer} with pre-defined rules. */ public static LogicalPlanOptimizer create() { - return new LogicalPlanOptimizer(Arrays.asList( - /* - * Phase 1: Transformations that rely on relational algebra equivalence - */ - new MergeFilterAndFilter(), - new PushFilterUnderSort(), - /* - * Phase 2: Transformations that rely on data source push down capability - */ - new CreateTableScanBuilder(), - TableScanPushDown.PUSH_DOWN_FILTER, - TableScanPushDown.PUSH_DOWN_AGGREGATION, - TableScanPushDown.PUSH_DOWN_SORT, - TableScanPushDown.PUSH_DOWN_LIMIT, - new PushDownPageSize(), - TableScanPushDown.PUSH_DOWN_HIGHLIGHT, - TableScanPushDown.PUSH_DOWN_NESTED, - TableScanPushDown.PUSH_DOWN_PROJECT, - new CreateTableWriteBuilder())); + return new LogicalPlanOptimizer( + Arrays.asList( + /* + * Phase 1: Transformations that rely on relational algebra equivalence + */ + new MergeFilterAndFilter(), + new PushFilterUnderSort(), + /* + * Phase 2: Transformations that rely on data source push down capability + */ + new CreateTableScanBuilder(), + TableScanPushDown.PUSH_DOWN_FILTER, + TableScanPushDown.PUSH_DOWN_AGGREGATION, + TableScanPushDown.PUSH_DOWN_SORT, + TableScanPushDown.PUSH_DOWN_LIMIT, + new PushDownPageSize(), + TableScanPushDown.PUSH_DOWN_HIGHLIGHT, + TableScanPushDown.PUSH_DOWN_NESTED, + TableScanPushDown.PUSH_DOWN_PROJECT, + new CreateTableWriteBuilder())); } - /** - * Optimize {@link LogicalPlan}. - */ + /** Optimize {@link LogicalPlan}. */ public LogicalPlan optimize(LogicalPlan plan) { LogicalPlan optimized = internalOptimize(plan); optimized.replaceChildPlans( - optimized.getChild().stream().map(this::optimize).collect( - Collectors.toList())); + optimized.getChild().stream().map(this::optimize).collect(Collectors.toList())); return internalOptimize(optimized); } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java index 8150de824d..5201c83c25 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/PushDownPageSize.java @@ -14,15 +14,12 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * A {@link LogicalPlanOptimizer} rule that pushes down page size - * to table scan builder. - */ +/** A {@link LogicalPlanOptimizer} rule that pushes down page size to table scan builder. 
*/ public class PushDownPageSize implements Rule { @Override public Pattern pattern() { return Pattern.typeOf(LogicalPaginate.class) - .matching(lp -> findTableScanBuilder(lp).isPresent()); + .matching(lp -> findTableScanBuilder(lp).isPresent()); } @Override @@ -44,7 +41,7 @@ private Optional findTableScanBuilder(LogicalPaginate logicalP if (children.stream().anyMatch(TableScanBuilder.class::isInstance)) { if (children.size() > 1) { throw new UnsupportedOperationException( - "Unsupported plan: relation operator cannot have siblings"); + "Unsupported plan: relation operator cannot have siblings"); } return Optional.of((TableScanBuilder) children.get(0)); } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java index 123754d3d0..b06ca3e968 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/Rule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer; import com.facebook.presto.matching.Captures; @@ -12,17 +11,17 @@ /** * Optimization Rule. + * * @param LogicalPlan. */ public interface Rule { - /** - * Get the {@link Pattern}. - */ + /** Get the {@link Pattern}. */ Pattern pattern(); /** * Apply the Rule to the LogicalPlan. + * * @param plan LogicalPlan which match the Pattern. * @param captures A list of LogicalPlan which are captured by the Pattern. * @return the transfromed LogicalPlan. diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java index 8f5ac86580..ee4e9a20cc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.pattern; import com.facebook.presto.matching.Capture; @@ -25,108 +24,89 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * Pattern helper class. - */ +/** Pattern helper class. */ @UtilityClass public class Patterns { - /** - * Logical filter with a given pattern on inner field. - */ + /** Logical filter with a given pattern on inner field. */ public static Pattern filter(Pattern pattern) { return Pattern.typeOf(LogicalFilter.class).with(source(pattern)); } - /** - * Logical aggregate operator with a given pattern on inner field. - */ + /** Logical aggregate operator with a given pattern on inner field. */ public static Pattern aggregate(Pattern pattern) { return Pattern.typeOf(LogicalAggregation.class).with(source(pattern)); } - /** - * Logical sort operator with a given pattern on inner field. - */ + /** Logical sort operator with a given pattern on inner field. */ public static Pattern sort(Pattern pattern) { return Pattern.typeOf(LogicalSort.class).with(source(pattern)); } - /** - * Logical limit operator with a given pattern on inner field. - */ + /** Logical limit operator with a given pattern on inner field. */ public static Pattern limit(Pattern pattern) { return Pattern.typeOf(LogicalLimit.class).with(source(pattern)); } - /** - * Logical highlight operator with a given pattern on inner field. - */ + /** Logical highlight operator with a given pattern on inner field. 
*/ public static Pattern highlight(Pattern pattern) { return Pattern.typeOf(LogicalHighlight.class).with(source(pattern)); } - /** - * Logical nested operator with a given pattern on inner field. - */ + /** Logical nested operator with a given pattern on inner field. */ public static Pattern nested(Pattern pattern) { return Pattern.typeOf(LogicalNested.class).with(source(pattern)); } - /** - * Logical project operator with a given pattern on inner field. - */ + /** Logical project operator with a given pattern on inner field. */ public static Pattern project(Pattern pattern) { return Pattern.typeOf(LogicalProject.class).with(source(pattern)); } - /** - * Pattern for {@link TableScanBuilder} and capture it meanwhile. - */ + /** Pattern for {@link TableScanBuilder} and capture it meanwhile. */ public static Pattern scanBuilder() { return Pattern.typeOf(TableScanBuilder.class).capturedAs(Capture.newCapture()); } - /** - * LogicalPlan source {@link Property}. - */ + /** LogicalPlan source {@link Property}. */ public static Property source() { - return Property.optionalProperty("source", plan -> plan.getChild().size() == 1 - ? Optional.of(plan.getChild().get(0)) - : Optional.empty()); + return Property.optionalProperty( + "source", + plan -> + plan.getChild().size() == 1 ? Optional.of(plan.getChild().get(0)) : Optional.empty()); } - /** - * Source (children field) with a given pattern. - */ + /** Source (children field) with a given pattern. */ @SuppressWarnings("unchecked") - public static - PropertyPattern source(Pattern pattern) { - Property property = Property.optionalProperty("source", - plan -> plan.getChild().size() == 1 - ? Optional.of((T) plan.getChild().get(0)) - : Optional.empty()); + public static PropertyPattern source(Pattern pattern) { + Property property = + Property.optionalProperty( + "source", + plan -> + plan.getChild().size() == 1 + ? Optional.of((T) plan.getChild().get(0)) + : Optional.empty()); return property.matching(pattern); } - /** - * Logical relation with table field. - */ + /** Logical relation with table field. */ public static Property table() { - return Property.optionalProperty("table", - plan -> plan instanceof LogicalRelation - ? Optional.of(((LogicalRelation) plan).getTable()) - : Optional.empty()); + return Property.optionalProperty( + "table", + plan -> + plan instanceof LogicalRelation + ? Optional.of(((LogicalRelation) plan).getTable()) + : Optional.empty()); } - /** - * Logical write with table field. - */ + /** Logical write with table field. */ public static Property writeTable() { - return Property.optionalProperty("table", - plan -> plan instanceof LogicalWrite - ? Optional.of(((LogicalWrite) plan).getTable()) - : Optional.empty()); + return Property.optionalProperty( + "table", + plan -> + plan instanceof LogicalWrite + ? 
Optional.of(((LogicalWrite) plan).getTable()) + : Optional.empty()); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java index 57763728d5..6270eee131 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/MergeFilterAndFilter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.rule; import static com.facebook.presto.matching.Pattern.typeOf; @@ -19,9 +18,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.optimizer.Rule; -/** - * Merge Filter --> Filter to the single Filter condition. - */ +/** Merge Filter --> Filter to the single Filter condition. */ public class MergeFilterAndFilter implements Rule { private final Capture capture; @@ -30,22 +27,18 @@ public class MergeFilterAndFilter implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeFilterAndFilter. - */ + /** Constructor of MergeFilterAndFilter. */ public MergeFilterAndFilter() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalFilter.class).capturedAs(capture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalFilter.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalFilter childFilter = captures.get(capture); return new LogicalFilter( - childFilter.getChild().get(0), - DSL.and(filter.getCondition(), childFilter.getCondition()) - ); + childFilter.getChild().get(0), DSL.and(filter.getCondition(), childFilter.getCondition())); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java index e3347b402b..b5cd312e64 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushFilterUnderSort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.optimizer.rule; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,7 +19,7 @@ import org.opensearch.sql.planner.optimizer.Rule; /** - * Push Filter under Sort. + * Push Filter under Sort.
* Filter - Sort - Child --> Sort - Filter - Child */ public class PushFilterUnderSort implements Rule { @@ -31,22 +30,17 @@ public class PushFilterUnderSort implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of PushFilterUnderSort. - */ + /** Constructor of PushFilterUnderSort. */ public PushFilterUnderSort() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalSort.class).capturedAs(capture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalSort.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalSort sort = captures.get(capture); - return new LogicalSort( - filter.replaceChildPlans(sort.getChild()), - sort.getSortList() - ); + return new LogicalSort(filter.replaceChildPlans(sort.getChild()), sort.getSortList()); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java index dbe61ca8c3..6ed8e1faeb 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/CreateTableScanBuilder.java @@ -19,9 +19,9 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Rule that replace logical relation operator to {@link TableScanBuilder} for later - * push down optimization. All push down optimization rules that depends on table scan - * builder needs to run after this. + * Rule that replace logical relation operator to {@link TableScanBuilder} for later push down + * optimization. All push down optimization rules that depends on table scan builder needs to run + * after this. */ public class CreateTableScanBuilder implements Rule { @@ -33,13 +33,10 @@ public class CreateTableScanBuilder implements Rule { @Getter private final Pattern pattern; - /** - * Construct create table scan builder rule. - */ + /** Construct create table scan builder rule. */ public CreateTableScanBuilder() { this.capture = Capture.newCapture(); - this.pattern = Pattern.typeOf(LogicalRelation.class) - .with(table().capturedAs(capture)); + this.pattern = Pattern.typeOf(LogicalRelation.class).with(table().capturedAs(capture)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java index de2b47d403..b83155d90f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/read/TableScanPushDown.java @@ -26,10 +26,10 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Rule template for all table scan push down rules. Because all push down optimization rules - * have similar workflow in common, such as a pattern that match an operator on top of table scan - * builder, and action that eliminates the original operator if pushed down, this class helps - * remove redundant code and improve readability. + * Rule template for all table scan push down rules. 
Because all push down optimization rules have + * similar workflow in common, such as a pattern that match an operator on top of table scan + * builder, and action that eliminates the original operator if pushed down, this class helps remove + * redundant code and improve readability. * * @param logical plan node type */ @@ -37,48 +37,32 @@ public class TableScanPushDown implements Rule { /** Push down optimize rule for filtering condition. */ public static final Rule PUSH_DOWN_FILTER = - match( - filter( - scanBuilder())) - .apply((filter, scanBuilder) -> scanBuilder.pushDownFilter(filter)); + match(filter(scanBuilder())) + .apply((filter, scanBuilder) -> scanBuilder.pushDownFilter(filter)); /** Push down optimize rule for aggregate operator. */ public static final Rule PUSH_DOWN_AGGREGATION = - match( - aggregate( - scanBuilder())) - .apply((agg, scanBuilder) -> scanBuilder.pushDownAggregation(agg)); + match(aggregate(scanBuilder())) + .apply((agg, scanBuilder) -> scanBuilder.pushDownAggregation(agg)); /** Push down optimize rule for sort operator. */ public static final Rule PUSH_DOWN_SORT = - match( - sort( - scanBuilder())) - .apply((sort, scanBuilder) -> scanBuilder.pushDownSort(sort)); + match(sort(scanBuilder())).apply((sort, scanBuilder) -> scanBuilder.pushDownSort(sort)); /** Push down optimize rule for limit operator. */ public static final Rule PUSH_DOWN_LIMIT = - match( - limit( - scanBuilder())) - .apply((limit, scanBuilder) -> scanBuilder.pushDownLimit(limit)); + match(limit(scanBuilder())).apply((limit, scanBuilder) -> scanBuilder.pushDownLimit(limit)); public static final Rule PUSH_DOWN_PROJECT = - match( - project( - scanBuilder())) - .apply((project, scanBuilder) -> scanBuilder.pushDownProject(project)); + match(project(scanBuilder())) + .apply((project, scanBuilder) -> scanBuilder.pushDownProject(project)); public static final Rule PUSH_DOWN_HIGHLIGHT = - match( - highlight( - scanBuilder())) + match(highlight(scanBuilder())) .apply((highlight, scanBuilder) -> scanBuilder.pushDownHighlight(highlight)); public static final Rule PUSH_DOWN_NESTED = - match( - nested( - scanBuilder())) + match(nested(scanBuilder())) .apply((nested, scanBuilder) -> scanBuilder.pushDownNested(nested)); /** Pattern that matches a plan node. */ @@ -90,10 +74,9 @@ public class TableScanPushDown implements Rule { /** Push down function applied to the plan node and captured table scan builder. */ private final BiFunction pushDownFunction; - @SuppressWarnings("unchecked") - private TableScanPushDown(WithPattern pattern, - BiFunction pushDownFunction) { + private TableScanPushDown( + WithPattern pattern, BiFunction pushDownFunction) { this.pattern = pattern; this.capture = ((CapturePattern) pattern.getPattern()).capture(); this.pushDownFunction = pushDownFunction; @@ -113,22 +96,18 @@ public LogicalPlan apply(T plan, Captures captures) { return plan; } - /** - * Custom builder class other than generated by Lombok to provide more readable code. - */ + /** Custom builder class other than generated by Lombok to provide more readable code. 
*/ static class TableScanPushDownBuilder { private WithPattern pattern; - public static - TableScanPushDownBuilder match(Pattern pattern) { + public static TableScanPushDownBuilder match(Pattern pattern) { TableScanPushDownBuilder builder = new TableScanPushDownBuilder<>(); builder.pattern = (WithPattern) pattern; return builder; } - public TableScanPushDown apply( - BiFunction pushDownFunction) { + public TableScanPushDown apply(BiFunction pushDownFunction) { return new TableScanPushDown<>(pattern, pushDownFunction); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java index 4fbf676862..0a4045d404 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/write/CreateTableWriteBuilder.java @@ -32,13 +32,10 @@ public class CreateTableWriteBuilder implements Rule { @Getter private final Pattern pattern; - /** - * Construct create table write builder rule. - */ + /** Construct create table write builder rule. */ public CreateTableWriteBuilder() { this.capture = Capture.newCapture(); - this.pattern = Pattern.typeOf(LogicalWrite.class) - .with(writeTable().capturedAs(capture)); + this.pattern = Pattern.typeOf(LogicalWrite.class).with(writeTable().capturedAs(capture)); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java index 1d9523464b..cc1c047c31 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import java.util.Collections; @@ -17,7 +16,6 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.Aggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; -import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.planner.physical.collector.Collector; import org.opensearch.sql.storage.bindingtuple.BindingTuple; @@ -28,30 +26,26 @@ @EqualsAndHashCode(callSuper = false) @ToString public class AggregationOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final List aggregatorList; - @Getter - private final List groupByExprList; + @Getter private final PhysicalPlan input; + @Getter private final List aggregatorList; + @Getter private final List groupByExprList; - /** - * {@link BindingTuple} Collector. - */ - @EqualsAndHashCode.Exclude - private final Collector collector; - @EqualsAndHashCode.Exclude - private Iterator iterator; + /** {@link BindingTuple} Collector. */ + @EqualsAndHashCode.Exclude private final Collector collector; + + @EqualsAndHashCode.Exclude private Iterator iterator; /** * AggregationOperator Constructor. 
* - * @param input Input {@link PhysicalPlan} - * @param aggregatorList List of {@link Aggregator} + * @param input Input {@link PhysicalPlan} + * @param aggregatorList List of {@link Aggregator} * @param groupByExprList List of group by {@link Expression} */ - public AggregationOperator(PhysicalPlan input, List aggregatorList, - List groupByExprList) { + public AggregationOperator( + PhysicalPlan input, + List aggregatorList, + List groupByExprList) { this.input = input; this.aggregatorList = aggregatorList; this.groupByExprList = groupByExprList; @@ -68,7 +62,6 @@ public List getChild() { return Collections.singletonList(input); } - @Override public boolean hasNext() { return iterator.hasNext(); diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java index 7921d0dd50..688ffa0d8d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/CursorCloseOperator.java @@ -11,9 +11,9 @@ import org.opensearch.sql.executor.ExecutionEngine; /** - * A plan node which blocks issuing a request in {@link #open} and - * getting results in {@link #hasNext}, but doesn't block releasing resources in {@link #close}. - * Designed to be on top of the deserialized tree. + * A plan node which blocks issuing a request in {@link #open} and getting results in {@link + * #hasNext}, but doesn't block releasing resources in {@link #close}. Designed to be on top of the + * deserialized tree. */ @RequiredArgsConstructor public class CursorCloseOperator extends PhysicalPlan { @@ -41,9 +41,7 @@ public List getChild() { return List.of(input); } - /** - * Provides an empty schema, because this plan node is always located on the top of the tree. - */ + /** Provides an empty schema, because this plan node is always located on the top of the tree. 
*/ @Override public ExecutionEngine.Schema schema() { return new ExecutionEngine.Schema(List.of()); diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java index 452fbd9707..7faec2154b 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/DedupeOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -28,21 +27,14 @@ @Getter @EqualsAndHashCode(callSuper = false) public class DedupeOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final List dedupeList; - @Getter - private final Integer allowedDuplication; - @Getter - private final Boolean keepEmpty; - @Getter - private final Boolean consecutive; - - @EqualsAndHashCode.Exclude - private final Deduper> deduper; - @EqualsAndHashCode.Exclude - private ExprValue next; + @Getter private final PhysicalPlan input; + @Getter private final List dedupeList; + @Getter private final Integer allowedDuplication; + @Getter private final Boolean keepEmpty; + @Getter private final Boolean consecutive; + + @EqualsAndHashCode.Exclude private final Deduper> deduper; + @EqualsAndHashCode.Exclude private ExprValue next; private static final Integer ALL_ONE_DUPLICATION = 1; private static final Boolean IGNORE_EMPTY = false; @@ -57,6 +49,7 @@ public DedupeOperator(PhysicalPlan input, List dedupeList) { /** * Dedup Constructor. + * * @param input input {@link PhysicalPlan} * @param dedupeList list of dedupe {@link Expression} * @param allowedDuplication max allowed duplication @@ -140,9 +133,7 @@ static class Deduper { private final BiFunction, K, Integer> seenFirstTime; private final Map seenMap = new ConcurrentHashMap<>(); - /** - * The Historical Deduper monitor the duplicated element with all the seen value. - */ + /** The Historical Deduper monitor the duplicated element with all the seen value. */ public static Deduper historicalDeduper() { return new Deduper<>( (map, key) -> { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java index 3b9e1a8214..ac62fe1b86 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/EvalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -34,17 +33,15 @@ * If the field name exist in the input, a new value will be put into to output. * *

The {@link EvalOperator#expressionList} are evaluated from left to right. It means you can - * reference previous evaluated field. - * e.g. fields velocity = distance/time, doubleVelocity = 2 * velocity + * reference previous evaluated field. e.g. fields velocity = distance/time, doubleVelocity = 2 * + * velocity */ @ToString @EqualsAndHashCode(callSuper = false) @RequiredArgsConstructor public class EvalOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final List> expressionList; + @Getter private final PhysicalPlan input; + @Getter private final List> expressionList; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -86,6 +83,7 @@ public ExprValue next() { /** * Evaluate the expression in the {@link EvalOperator#expressionList} with {@link Environment}. + * * @param env {@link Environment} * @return The mapping of reference and {@link ExprValue} for each expression. */ diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java index 4b5045d24e..ec61d53163 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java @@ -17,21 +17,17 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * The Filter operator represents WHERE clause and - * uses the conditions to evaluate the input {@link BindingTuple}. - * The Filter operator only returns the results that evaluated to true. - * The NULL and MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. + * The Filter operator represents WHERE clause and uses the conditions to evaluate the input {@link + * BindingTuple}. The Filter operator only returns the results that evaluated to true. The NULL and + * MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. */ @EqualsAndHashCode(callSuper = false) @ToString @RequiredArgsConstructor public class FilterOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Expression conditions; - @ToString.Exclude - private ExprValue next = null; + @Getter private final PhysicalPlan input; + @Getter private final Expression conditions; + @ToString.Exclude private ExprValue next = null; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java index cd84234c4b..dc9038f2a3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/LimitOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -15,15 +14,15 @@ import org.opensearch.sql.data.model.ExprValue; /** - * The limit operator sets a window, to and block the rows out of the window - * and allow only the result subset within this window to the output. + * The limit operator sets a window, to and block the rows out of the window and allow only the + * result subset within this window to the output. * - *

The result subset is enframed from original result with {@link LimitOperator#offset} - * as the offset and {@link LimitOperator#limit} as the size, thus the output - * is the subset of the original result set that has indices from {index + 1} to {index + limit}. - * Special cases might occur where the result subset has a size smaller than expected {limit}, - * it occurs when the original result set has a size smaller than {index + limit}, - * or even not greater than the offset. The latter results in an empty output.

+ *

The result subset is enframed from original result with {@link LimitOperator#offset} as the + * offset and {@link LimitOperator#limit} as the size, thus the output is the subset of the original + * result set that has indices from {index + 1} to {index + limit}. Special cases might occur where + * the result subset has a size smaller than expected {limit}, it occurs when the original result + * set has a size smaller than {index + limit}, or even not greater than the offset. The latter + * results in an empty output. */ @RequiredArgsConstructor @Getter @@ -66,5 +65,4 @@ public R accept(PhysicalPlanNodeVisitor visitor, C context) { public List getChild() { return ImmutableList.of(input); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java index 54cd541519..8539df5463 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/NestedOperator.java @@ -27,58 +27,47 @@ import org.opensearch.sql.expression.ReferenceExpression; /** - * The NestedOperator evaluates the {@link NestedOperator#fields} and - * generates {@link NestedOperator#nonNestedFields} to form the - * {@link NestedOperator#result} output. Resolve two nested fields - * with differing paths will result in a cartesian product(inner join). + * The NestedOperator evaluates the {@link NestedOperator#fields} and generates {@link + * NestedOperator#nonNestedFields} to form the {@link NestedOperator#result} output. Resolve two + * nested fields with differing paths will result in a cartesian product(inner join). */ @EqualsAndHashCode(callSuper = false) public class NestedOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Set fields; // Needs to be a Set to match legacy implementation - @Getter - private final Map> groupedPathsAndFields; - @EqualsAndHashCode.Exclude - private List> result = new ArrayList<>(); - @EqualsAndHashCode.Exclude - private List nonNestedFields = new ArrayList<>(); + @Getter private final PhysicalPlan input; + @Getter private final Set fields; // Needs to be a Set to match legacy implementation + @Getter private final Map> groupedPathsAndFields; + @EqualsAndHashCode.Exclude private List> result = new ArrayList<>(); + @EqualsAndHashCode.Exclude private List nonNestedFields = new ArrayList<>(); + @EqualsAndHashCode.Exclude private ListIterator> flattenedResult = result.listIterator(); /** * Constructor for NestedOperator with list of map as arg. + * * @param input : PhysicalPlan input. * @param fields : List of all fields and paths for nested fields. */ public NestedOperator(PhysicalPlan input, List> fields) { this.input = input; - this.fields = fields.stream() - .map(m -> m.get("field").toString()) - .collect(Collectors.toSet()); - this.groupedPathsAndFields = fields.stream().collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping( - m -> m.get("field").toString(), - toList() - ) - ) - ); + this.fields = fields.stream().map(m -> m.get("field").toString()).collect(Collectors.toSet()); + this.groupedPathsAndFields = + fields.stream() + .collect( + Collectors.groupingBy( + m -> m.get("path").toString(), + mapping(m -> m.get("field").toString(), toList()))); } /** * Constructor for NestedOperator with Set of fields. + * * @param input : PhysicalPlan input. * @param fields : List of all fields for nested fields. 
* @param groupedPathsAndFields : Map of fields grouped by their path. */ public NestedOperator( - PhysicalPlan input, - Set fields, - Map> groupedPathsAndFields - ) { + PhysicalPlan input, Set fields, Map> groupedPathsAndFields) { this.input = input; this.fields = fields; this.groupedPathsAndFields = groupedPathsAndFields; @@ -128,16 +117,16 @@ public ExprValue next() { } /** - * Generate list of non-nested fields that are in inputMap, but not in the member variable - * fields list. + * Generate list of non-nested fields that are in inputMap, but not in the member variable fields + * list. + * * @param inputMap : Row to parse non-nested fields. */ public void generateNonNestedFieldsMap(ExprValue inputMap) { for (Map.Entry inputField : inputMap.tupleValue().entrySet()) { boolean foundNestedField = - this.fields.stream().anyMatch( - field -> field.split("\\.")[0].equalsIgnoreCase(inputField.getKey()) - ); + this.fields.stream() + .anyMatch(field -> field.split("\\.")[0].equalsIgnoreCase(inputField.getKey())); if (!foundNestedField) { this.nonNestedFields.add(inputField.getKey()); @@ -145,12 +134,11 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { } } - /** - * Simplifies the structure of row's source Map by flattening it, - * making the full path of an object the key - * and the Object it refers to the value. + * Simplifies the structure of row's source Map by flattening it, making the full path of an + * object the key and the Object it refers to the value. * + *

    * 

Sample input: * keys = ['comments.likes'] * row = comments: { @@ -159,6 +147,7 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { * *

Return: * flattenedRow = {comment.likes: 2} + *

* * @param nestedField : Field to query in row. * @param row : Row returned from OS. @@ -166,11 +155,7 @@ public void generateNonNestedFieldsMap(ExprValue inputMap) { * @return : List of nested select items or cartesian product of nested calls. */ private List> flatten( - String nestedField, - ExprValue row, - List> prevList - ) { + String nestedField, ExprValue row, List> prevList) { List> copy = new ArrayList<>(); List> newList = new ArrayList<>(); @@ -201,11 +186,10 @@ private List> flatten( // Generate cartesian product for (Map prevMap : prevList) { for (Map newMap : copy) { - newList.add(Stream.of(newMap, prevMap) - .flatMap(map -> map.entrySet().stream()) - .collect(Collectors.toMap( - Map.Entry::getKey, - Map.Entry::getValue))); + newList.add( + Stream.of(newMap, prevMap) + .flatMap(map -> map.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); } } return newList; @@ -214,6 +198,7 @@ private List> flatten( /** * Check if newMap field has any sharing paths in prevMap. + * * @param newMap : New map to add to result set. * @return : true if there is already a field added to result set with same path. */ @@ -243,9 +228,11 @@ boolean containSamePath(Map newMap) { * @return : Object at current nested level. */ private void getNested( - String field, String nestedField, ExprValue row, - List> ret, ExprValue nestedObj - ) { + String field, + String nestedField, + ExprValue row, + List> ret, + ExprValue nestedObj) { ExprValue currentObj = (nestedObj == null) ? row : nestedObj; String[] splitKeys = nestedField.split("\\."); @@ -271,12 +258,10 @@ private void getNested( // Return final nested result if (currentObj != null && (StringUtils.substringAfterLast(field, ".").equals(nestedField) - || !field.contains(".")) - ) { + || !field.contains("."))) { ret.add(new LinkedHashMap<>(Map.of(field, currentObj))); } else if (currentObj != null) { - getNested(field, nestedField.substring(nestedField.indexOf(".") + 1), - row, ret, currentObj); + getNested(field, nestedField.substring(nestedField.indexOf(".") + 1), row, ret, currentObj); } } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java index 247b347940..0ae795aa31 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import java.util.Iterator; @@ -12,9 +11,7 @@ import org.opensearch.sql.planner.PlanNode; import org.opensearch.sql.storage.split.Split; -/** - * Physical plan. - */ +/** Physical plan. */ public abstract class PhysicalPlan implements PlanNode, Iterator, AutoCloseable { /** @@ -22,8 +19,8 @@ public abstract class PhysicalPlan * * @param visitor visitor. * @param context visitor context. - * @param returned object type. - * @param context type. + * @param returned object type. + * @param context type. * @return returned object. 
*/ public abstract R accept(PhysicalPlanNodeVisitor visitor, C context); @@ -41,7 +38,9 @@ public void add(Split split) { } public ExecutionEngine.Schema schema() { - throw new IllegalStateException(String.format("[BUG] schema can been only applied to " - + "ProjectOperator, instead of %s", this.getClass().getSimpleName())); + throw new IllegalStateException( + String.format( + "[BUG] schema can been only applied to " + "ProjectOperator, instead of %s", + this.getClass().getSimpleName())); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java index 8c10c91fb6..147f0e08dc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanDSL.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -23,9 +22,7 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.expression.window.WindowDefinition; -/** - * Physical Plan DSL. - */ +/** Physical Plan DSL. */ @UtilityClass public class PhysicalPlanDSL { @@ -47,8 +44,10 @@ public static ProjectOperator project(PhysicalPlan input, NamedExpression... fie return new ProjectOperator(input, Arrays.asList(fields), ImmutableList.of()); } - public static ProjectOperator project(PhysicalPlan input, List fields, - List namedParseExpressions) { + public static ProjectOperator project( + PhysicalPlan input, + List fields, + List namedParseExpressions) { return new ProjectOperator(input, fields, namedParseExpressions); } @@ -61,8 +60,7 @@ public static EvalOperator eval( return new EvalOperator(input, Arrays.asList(expressions)); } - public static SortOperator sort(PhysicalPlan input, Pair... sorts) { + public static SortOperator sort(PhysicalPlan input, Pair... sorts) { return new SortOperator(input, Arrays.asList(sorts)); } @@ -80,22 +78,27 @@ public static DedupeOperator dedupe( input, Arrays.asList(expressions), allowedDuplication, keepEmpty, consecutive); } - public WindowOperator window(PhysicalPlan input, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + public WindowOperator window( + PhysicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { return new WindowOperator(input, windowFunction, windowDefinition); } - public static RareTopNOperator rareTopN(PhysicalPlan input, CommandType commandType, - List groups, Expression... expressions) { + public static RareTopNOperator rareTopN( + PhysicalPlan input, + CommandType commandType, + List groups, + Expression... expressions) { return new RareTopNOperator(input, commandType, Arrays.asList(expressions), groups); } - public static RareTopNOperator rareTopN(PhysicalPlan input, CommandType commandType, - int noOfResults, - List groups, Expression... expressions) { - return new RareTopNOperator(input, commandType, noOfResults, Arrays.asList(expressions), - groups); + public static RareTopNOperator rareTopN( + PhysicalPlan input, + CommandType commandType, + int noOfResults, + List groups, + Expression... 
expressions) { + return new RareTopNOperator( + input, commandType, noOfResults, Arrays.asList(expressions), groups); } @SafeVarargs @@ -108,9 +111,7 @@ public static LimitOperator limit(PhysicalPlan input, Integer limit, Integer off } public static NestedOperator nested( - PhysicalPlan input, - Set args, - Map> groupedFieldsByPath) { + PhysicalPlan input, Set args, Map> groupedFieldsByPath) { return new NestedOperator(input, args, groupedFieldsByPath); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java index 1e8f08d39f..99b5cc8020 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import org.opensearch.sql.storage.TableScanOperator; @@ -72,7 +71,7 @@ public R visitValues(ValuesOperator node, C context) { public R visitSort(SortOperator node, C context) { return visitNode(node, context); } - + public R visitRareTopN(RareTopNOperator node, C context) { return visitNode(node, context); } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java index 1699c97c15..55422dacd3 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableMap; @@ -27,19 +26,14 @@ import org.opensearch.sql.expression.parse.ParseExpression; import org.opensearch.sql.planner.SerializablePlan; -/** - * Project the fields specified in {@link ProjectOperator#projectList} from input. - */ +/** Project the fields specified in {@link ProjectOperator#projectList} from input. 
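PhysicalPlanNodeVisitor, touched above only for whitespace, is the double-dispatch hook for walking a physical plan. A hedged sketch of how accept() pairs up with a visitor; the NodeCounter class is illustrative, not part of this change, and it assumes the visitNode and getChild members declared in this package::

    // Counts the nodes of a plan tree; assumes access to org.opensearch.sql.planner.physical types.
    class NodeCounter extends PhysicalPlanNodeVisitor<Integer, Void> {
      @Override
      public Integer visitNode(PhysicalPlan node, Void context) {
        int count = 1;
        for (PhysicalPlan child : node.getChild()) {
          count += child.accept(this, context); // double dispatch back into this visitor
        }
        return count;
      }
    }

    // Usage: int nodes = plan.accept(new NodeCounter(), null);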
*/ @ToString @EqualsAndHashCode(callSuper = false) @AllArgsConstructor public class ProjectOperator extends PhysicalPlan implements SerializablePlan { - @Getter - private PhysicalPlan input; - @Getter - private List projectList; - @Getter - private List namedParseExpressions; + @Getter private PhysicalPlan input; + @Getter private List projectList; + @Getter private List namedParseExpressions; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -65,17 +59,20 @@ public ExprValue next() { // TODO needs a better implementation, see https://github.com/opensearch-project/sql/issues/458 for (NamedExpression expr : projectList) { ExprValue exprValue = expr.valueOf(inputValue.bindingTuples()); - Optional optionalParseExpression = namedParseExpressions.stream() - .filter(parseExpr -> parseExpr.getNameOrAlias().equals(expr.getNameOrAlias())) - .findFirst(); + Optional optionalParseExpression = + namedParseExpressions.stream() + .filter(parseExpr -> parseExpr.getNameOrAlias().equals(expr.getNameOrAlias())) + .findFirst(); if (optionalParseExpression.isEmpty()) { mapBuilder.put(expr.getNameOrAlias(), exprValue); continue; } NamedExpression parseExpression = optionalParseExpression.get(); - ExprValue sourceFieldValue = inputValue.bindingTuples() - .resolve(((ParseExpression) parseExpression.getDelegated()).getSourceField()); + ExprValue sourceFieldValue = + inputValue + .bindingTuples() + .resolve(((ParseExpression) parseExpression.getDelegated()).getSourceField()); if (sourceFieldValue.isMissing()) { // source field will be missing after stats command, read from inputValue if it exists // otherwise do nothing since it should not appear as a field @@ -94,15 +91,17 @@ public ExprValue next() { @Override public ExecutionEngine.Schema schema() { - return new ExecutionEngine.Schema(getProjectList().stream() - .map(expr -> new ExecutionEngine.Schema.Column(expr.getName(), - expr.getAlias(), expr.type())).collect(Collectors.toList())); + return new ExecutionEngine.Schema( + getProjectList().stream() + .map( + expr -> + new ExecutionEngine.Schema.Column(expr.getName(), expr.getAlias(), expr.type())) + .collect(Collectors.toList())); } /** Don't use, it is for deserialization needs only. 
*/ @Deprecated - public ProjectOperator() { - } + public ProjectOperator() {} @SuppressWarnings("unchecked") @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java index fb3a91e2e6..ecf997f7ae 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RareTopNOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.annotations.VisibleForTesting; @@ -36,40 +35,38 @@ @EqualsAndHashCode(callSuper = false) public class RareTopNOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final CommandType commandType; - @Getter - private final Integer noOfResults; - @Getter - private final List fieldExprList; - @Getter - private final List groupByExprList; - - @EqualsAndHashCode.Exclude - private final Group group; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @Getter private final PhysicalPlan input; + @Getter private final CommandType commandType; + @Getter private final Integer noOfResults; + @Getter private final List fieldExprList; + @Getter private final List groupByExprList; - private static final Integer DEFAULT_NO_OF_RESULTS = 10; + @EqualsAndHashCode.Exclude private final Group group; + @EqualsAndHashCode.Exclude private Iterator iterator; + private static final Integer DEFAULT_NO_OF_RESULTS = 10; - public RareTopNOperator(PhysicalPlan input, CommandType commandType, - List fieldExprList, List groupByExprList) { + public RareTopNOperator( + PhysicalPlan input, + CommandType commandType, + List fieldExprList, + List groupByExprList) { this(input, commandType, DEFAULT_NO_OF_RESULTS, fieldExprList, groupByExprList); } /** * RareTopNOperator Constructor. * - * @param input Input {@link PhysicalPlan} - * @param commandType Enum for Rare/TopN command. - * @param noOfResults Number of results - * @param fieldExprList List of {@link Expression} + * @param input Input {@link PhysicalPlan} + * @param commandType Enum for Rare/TopN command. + * @param noOfResults Number of results + * @param fieldExprList List of {@link Expression} * @param groupByExprList List of group by {@link Expression} */ - public RareTopNOperator(PhysicalPlan input, CommandType commandType, int noOfResults, + public RareTopNOperator( + PhysicalPlan input, + CommandType commandType, + int noOfResults, List fieldExprList, List groupByExprList) { this.input = input; @@ -115,48 +112,50 @@ public class Group { private final Map> groupListMap = new HashMap<>(); - /** - * Push the BindingTuple to Group. - */ + /** Push the BindingTuple to Group. 
*/ public void push(ExprValue inputValue) { Key groupKey = new Key(inputValue, groupByExprList); Key fieldKey = new Key(inputValue, fieldExprList); - groupListMap.computeIfAbsent(groupKey, k -> { - Map map = new HashMap<>(); - map.put(fieldKey, 1); - return map; - }); - groupListMap.computeIfPresent(groupKey, (key, map) -> { - map.computeIfAbsent(fieldKey, f -> 1); - map.computeIfPresent(fieldKey, (field, count) -> { - return count + 1; - }); - return map; - }); + groupListMap.computeIfAbsent( + groupKey, + k -> { + Map map = new HashMap<>(); + map.put(fieldKey, 1); + return map; + }); + groupListMap.computeIfPresent( + groupKey, + (key, map) -> { + map.computeIfAbsent(fieldKey, f -> 1); + map.computeIfPresent( + fieldKey, + (field, count) -> { + return count + 1; + }); + return map; + }); } - /** - * Get the list of {@link BindingTuple} for each group. - */ + /** Get the list of {@link BindingTuple} for each group. */ public List result() { ImmutableList.Builder resultBuilder = new ImmutableList.Builder<>(); - groupListMap.forEach((groups, fieldMap) -> { - Map map = new LinkedHashMap<>(); - List result = find(fieldMap); - result.forEach(field -> { - map.putAll(groups.keyMap(groupByExprList)); - map.putAll(field.keyMap(fieldExprList)); - resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); - }); - }); + groupListMap.forEach( + (groups, fieldMap) -> { + Map map = new LinkedHashMap<>(); + List result = find(fieldMap); + result.forEach( + field -> { + map.putAll(groups.keyMap(groupByExprList)); + map.putAll(field.keyMap(fieldExprList)); + resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); + }); + }); return resultBuilder.build(); } - /** - * Get a list of result. - */ + /** Get a list of result. */ public List find(Map map) { Comparator> valueComparator; if (CommandType.TOP.equals(commandType)) { @@ -165,40 +164,37 @@ public List find(Map map) { valueComparator = Map.Entry.comparingByValue(); } - return map.entrySet().stream().sorted(valueComparator).limit(noOfResults) - .map(Map.Entry::getKey).collect(Collectors.toList()); + return map.entrySet().stream() + .sorted(valueComparator) + .limit(noOfResults) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); } } - /** - * Key. - */ + /** Key. */ @EqualsAndHashCode @VisibleForTesting public class Key { private final List valueList; - /** - * Key constructor. - */ + /** Key constructor. */ public Key(ExprValue value, List exprList) { - this.valueList = exprList.stream() - .map(expr -> expr.valueOf(value.bindingTuples())).collect(Collectors.toList()); + this.valueList = + exprList.stream() + .map(expr -> expr.valueOf(value.bindingTuples())) + .collect(Collectors.toList()); } - /** - * Return the Map of key and key value. - */ + /** Return the Map of key and key value. 
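The push() hunk above maintains a count per (group key, field key) pair with computeIfAbsent and computeIfPresent. Purely as a reading aid, and not a proposed change to this formatting-only patch, the same counting behaviour can be written with Map.merge::

    import java.util.HashMap;
    import java.util.Map;

    class GroupCountSketch {
      public static void main(String[] args) {
        Map<String, Map<String, Integer>> groups = new HashMap<>();

        // Create the inner map for the group if absent, then increment the field's counter.
        groups.computeIfAbsent("groupKey", k -> new HashMap<>())
            .merge("fieldKey", 1, Integer::sum);
        groups.computeIfAbsent("groupKey", k -> new HashMap<>())
            .merge("fieldKey", 1, Integer::sum);

        System.out.println(groups); // {groupKey={fieldKey=2}}
      }
    }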
*/ public Map keyMap(List exprList) { return Streams.zip( - exprList.stream().map( - expression -> expression.toString()), - valueList.stream(), - AbstractMap.SimpleEntry::new - ).collect(Collectors.toMap(key -> key.getKey(), key -> key.getValue())); + exprList.stream().map(expression -> expression.toString()), + valueList.stream(), + AbstractMap.SimpleEntry::new) + .collect(Collectors.toMap(key -> key.getKey(), key -> key.getValue())); } } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java index 3fa3519d10..b4a724aa7a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RemoveOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -24,26 +23,19 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Remove the fields specified in {@link RemoveOperator#removeList} from input. - */ +/** Remove the fields specified in {@link RemoveOperator#removeList} from input. */ @ToString @EqualsAndHashCode(callSuper = false) public class RemoveOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Set removeList; - @ToString.Exclude - @EqualsAndHashCode.Exclude - private final Set nameRemoveList; + @Getter private final PhysicalPlan input; + @Getter private final Set removeList; + @ToString.Exclude @EqualsAndHashCode.Exclude private final Set nameRemoveList; /** * Todo. This is the temporary solution that add the mapping between string and ref. because when * rename the field from input, there we can only get the string field. */ - public RemoveOperator(PhysicalPlan input, - Set removeList) { + public RemoveOperator(PhysicalPlan input, Set removeList) { this.input = input; this.removeList = removeList; this.nameRemoveList = diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java index f0b0d13c50..e6f97dab4a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/RenameOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.data.type.ExprCoreType.STRUCT; @@ -24,35 +23,30 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * Rename the binding name in {@link BindingTuple}. - * The mapping maintain the relation between source and target. - * it means BindingTuple.resolve(target) = BindingTuple.resolve(source). + * Rename the binding name in {@link BindingTuple}. The mapping maintain the relation between source + * and target. it means BindingTuple.resolve(target) = BindingTuple.resolve(source). */ @EqualsAndHashCode(callSuper = false) @ToString public class RenameOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; - @Getter - private final Map mapping; + @Getter private final PhysicalPlan input; + @Getter private final Map mapping; + /** * Todo. This is the temporary solution that add the mapping between string and ref. because when * rename the field from input, there we can only get the string field. 
*/ - @ToString.Exclude - @EqualsAndHashCode.Exclude + @ToString.Exclude @EqualsAndHashCode.Exclude private final Map nameMapping; - /** - * Constructor of RenameOperator. - */ - public RenameOperator(PhysicalPlan input, - Map mapping) { + /** Constructor of RenameOperator. */ + public RenameOperator(PhysicalPlan input, Map mapping) { this.input = input; this.mapping = mapping; this.nameMapping = - mapping.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().getAttr(), - entry -> entry.getValue())); + mapping.entrySet().stream() + .collect( + Collectors.toMap(entry -> entry.getKey().getAttr(), entry -> entry.getValue())); } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java index 4463892ca5..e3116baedf 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/SortOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import static org.opensearch.sql.ast.tree.Sort.NullOrder.NULL_FIRST; @@ -28,30 +27,26 @@ /** * Sort Operator.The input data is sorted by the sort fields in the {@link SortOperator#sortList}. - * The sort field is specified by the {@link Expression} with {@link SortOption}. - * The count indicate how many sorted result should been return. + * The sort field is specified by the {@link Expression} with {@link SortOption}. The count indicate + * how many sorted result should been return. */ @ToString @EqualsAndHashCode(callSuper = false) public class SortOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final List> sortList; - @EqualsAndHashCode.Exclude - private final Sorter sorter; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @Getter private final List> sortList; + @EqualsAndHashCode.Exclude private final Sorter sorter; + @EqualsAndHashCode.Exclude private Iterator iterator; /** * Sort Operator Constructor. + * * @param input input {@link PhysicalPlan} - * @param sortList list of sort sort field. - * The sort field is specified by the {@link Expression} with {@link SortOption} + * @param sortList list of sort sort field. 
The sort field is specified by the {@link Expression} + * with {@link SortOption} */ - public SortOperator( - PhysicalPlan input, List> sortList) { + public SortOperator(PhysicalPlan input, List> sortList) { this.input = input; this.sortList = sortList; SorterBuilder sorterBuilder = Sorter.builder(); @@ -101,8 +96,7 @@ public ExprValue next() { @Builder public static class Sorter implements Comparator { - @Singular - private final List>> comparators; + @Singular private final List>> comparators; @Override public int compare(ExprValue o1, ExprValue o2) { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java index 4ac9d6a30a..4a4ce27da8 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableList; @@ -18,22 +17,15 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.LiteralExpression; -/** - * Physical operator for Values. - */ +/** Physical operator for Values. */ @ToString @EqualsAndHashCode(callSuper = false, of = "values") public class ValuesOperator extends PhysicalPlan { - /** - * Original values list for print and equality check. - */ - @Getter - private final List> values; + /** Original values list for print and equality check. */ + @Getter private final List> values; - /** - * Values iterator. - */ + /** Values iterator. */ private final Iterator> valuesIterator; public ValuesOperator(List> values) { @@ -58,10 +50,8 @@ public boolean hasNext() { @Override public ExprValue next() { - List values = valuesIterator.next().stream() - .map(Expression::valueOf) - .collect(Collectors.toList()); + List values = + valuesIterator.next().stream().map(Expression::valueOf).collect(Collectors.toList()); return new ExprCollectionValue(values); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java index 8ecdcfbf49..10377ce47a 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/WindowOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.planner.physical; import com.google.common.collect.ImmutableMap; @@ -21,43 +20,34 @@ import org.opensearch.sql.expression.window.WindowFunctionExpression; import org.opensearch.sql.expression.window.frame.WindowFrame; -/** - * Physical operator for window function computation. - */ +/** Physical operator for window function computation. 
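The Sorter reformatted above holds one comparator per sort key and lets the first non-zero comparison decide. A small sketch of that multi-key pattern using plain java.util.Comparator, with invented data::

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    class MultiKeySortSketch {
      public static void main(String[] args) {
        // First key decides; ties fall through to the second key, as in Sorter.compare.
        Comparator<String[]> byFirstThenSecond =
            Comparator.comparing((String[] r) -> r[0]).thenComparing(r -> r[1]);

        List<String[]> rows =
            Arrays.asList(new String[] {"b", "1"}, new String[] {"a", "2"}, new String[] {"a", "1"});
        rows.sort(byFirstThenSecond);
        rows.forEach(r -> System.out.println(String.join(",", r))); // a,1  a,2  b,1
      }
    }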
*/ @EqualsAndHashCode(callSuper = false) @ToString public class WindowOperator extends PhysicalPlan { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final NamedExpression windowFunction; + @Getter private final NamedExpression windowFunction; - @Getter - private final WindowDefinition windowDefinition; + @Getter private final WindowDefinition windowDefinition; - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final WindowFrame windowFrame; + @EqualsAndHashCode.Exclude @ToString.Exclude private final WindowFrame windowFrame; /** - * Peeking iterator that can peek next element which is required - * by window frame such as peer frame to prefetch all rows related - * to same peer (of same sorting key). + * Peeking iterator that can peek next element which is required by window frame such as peer + * frame to prefetch all rows related to same peer (of same sorting key). */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + @EqualsAndHashCode.Exclude @ToString.Exclude private final PeekingIterator peekingIterator; /** * Initialize window operator. - * @param input child operator - * @param windowFunction window function - * @param windowDefinition window definition + * + * @param input child operator + * @param windowFunction window function + * @param windowDefinition window definition */ - public WindowOperator(PhysicalPlan input, - NamedExpression windowFunction, - WindowDefinition windowDefinition) { + public WindowOperator( + PhysicalPlan input, NamedExpression windowFunction, WindowDefinition windowDefinition) { this.input = input; this.windowFunction = windowFunction; this.windowDefinition = windowDefinition; @@ -107,5 +97,4 @@ private void addWindowFunctionResultColumn(ImmutableMap.Builder supplier; /** - * Map from bucketKey to nested collector sorted by key to make sure - * final result is in order after traversal. + * Map from bucketKey to nested collector sorted by key to make sure final result is in order + * after traversal. */ private final Map collectorMap = new TreeMap<>(); - /** - * Bucket Index. - */ + /** Bucket Index. */ private int bucketIndex = 0; /** - * Collect Bucket from {@link BindingTuple}. - * If bucket not exist, create new bucket and {@link Collector}. - * If bucket exist, let {@link Collector} in the bucket collect from {@link BindingTuple}. + * Collect Bucket from {@link BindingTuple}. If bucket not exist, create new bucket and {@link + * Collector}. If bucket exist, let {@link Collector} in the bucket collect from {@link + * BindingTuple}. * * @param input {@link BindingTuple}. */ @@ -64,6 +56,7 @@ public void collect(BindingTuple input) { /** * Bucket Key. + * * @param tuple {@link BindingTuple}. * @return Bucket Key. */ diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java index a2b3a41a27..e696d5068f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Collector.java @@ -13,9 +13,7 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.storage.bindingtuple.BindingTuple; -/** - * Interface of {@link BindingTuple} Collector. - */ +/** Interface of {@link BindingTuple} Collector. */ public interface Collector { /** @@ -32,16 +30,12 @@ public interface Collector { */ List results(); - /** - * {@link Collector} tree builder. 
- */ + /** {@link Collector} tree builder. */ @UtilityClass class Builder { - /** - * build {@link Collector}. - */ - public static Collector build(List buckets, - List aggregators) { + /** build {@link Collector}. */ + public static Collector build( + List buckets, List aggregators) { if (buckets.isEmpty()) { return new MetricCollector(aggregators); } else { diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java index c804c7bc9b..2cfa3c9457 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/MetricCollector.java @@ -22,9 +22,7 @@ */ public class MetricCollector implements Collector { - /** - * List of {@link NamedAggregator}. - */ + /** List of {@link NamedAggregator}. */ private final List> aggregators; /** diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java index 782c931046..81a1a0230f 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/collector/Rounding.java @@ -34,15 +34,11 @@ import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.utils.DateTimeUtils; -/** - * Rounding. - */ +/** Rounding. */ @EqualsAndHashCode public abstract class Rounding { - /** - * Create Rounding instance. - */ + /** Create Rounding instance. */ public static Rounding createRounding(SpanExpression span) { ExprValue interval = span.getValue().valueOf(); ExprType type = span.type(); @@ -70,7 +66,6 @@ public static Rounding createRounding(SpanExpression span) { public abstract ExprValue round(ExprValue value); - static class TimestampRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -82,13 +77,13 @@ public TimestampRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timestampValue() - .toEpochMilli(), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + dateTimeUnit.round(var.timestampValue().toEpochMilli(), interval.integerValue())); return new ExprTimestampValue(instant); } } - static class DatetimeRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -100,13 +95,15 @@ public DatetimeRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.datetimeValue() - .atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + dateTimeUnit.round( + var.datetimeValue().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), + interval.integerValue())); return new ExprDatetimeValue(instant.atZone(UTC_ZONE_ID).toLocalDateTime()); } } - static class DateRounding extends Rounding { private final ExprValue interval; private final DateTimeUnit dateTimeUnit; @@ -118,8 +115,11 @@ public DateRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.dateValue().atStartOfDay() - .atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), interval.integerValue())); + Instant 
instant = + Instant.ofEpochMilli( + dateTimeUnit.round( + var.dateValue().atStartOfDay().atZone(UTC_ZONE_ID).toInstant().toEpochMilli(), + interval.integerValue())); return new ExprDateValue(instant.atZone(UTC_ZONE_ID).toLocalDate()); } } @@ -136,17 +136,18 @@ public TimeRounding(ExprValue interval, String unit) { @Override public ExprValue round(ExprValue var) { if (dateTimeUnit.id > 4) { - throw new ExpressionEvaluationException(String - .format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); + throw new ExpressionEvaluationException( + String.format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); } - Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timeValue().getLong( - ChronoField.MILLI_OF_DAY), interval.integerValue())); + Instant instant = + Instant.ofEpochMilli( + dateTimeUnit.round( + var.timeValue().getLong(ChronoField.MILLI_OF_DAY), interval.integerValue())); return new ExprTimeValue(instant.atZone(UTC_ZONE_ID).toLocalTime()); } } - static class LongRounding extends Rounding { private final Long longInterval; @@ -161,7 +162,6 @@ public ExprValue round(ExprValue value) { } } - static class DoubleRounding extends Rounding { private final Double doubleInterval; @@ -171,13 +171,12 @@ protected DoubleRounding(ExprValue interval) { @Override public ExprValue round(ExprValue value) { - double rounded = Double - .valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; + double rounded = + Double.valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; return ExprValueUtils.doubleValue(rounded); } } - @RequiredArgsConstructor static class UnknownRounding extends Rounding { @Override @@ -186,43 +185,37 @@ public ExprValue round(ExprValue var) { } } - @RequiredArgsConstructor public enum DateTimeUnit { - MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND - .getBaseUnit().getDuration().toMillis()) { + MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE - .getBaseUnit().getDuration().toMillis()) { + SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR - .getBaseUnit().getDuration().toMillis()) { + MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - HOUR(4, "h", true, ChronoField.HOUR_OF_DAY - .getBaseUnit().getDuration().toMillis()) { + HOUR(4, "h", true, ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); } }, - DAY(5, "d", true, ChronoField.DAY_OF_MONTH - .getBaseUnit().getDuration().toMillis()) { + DAY(5, "d", true, ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis()) { @Override long round(long utcMillis, int interval) { return DateTimeUtils.roundFloor(utcMillis, ratio * interval); @@ -257,18 +250,14 @@ long round(long utcMillis, int interval) { } }; - @Getter - private final int id; - @Getter - private final String name; + @Getter private final int id; + @Getter private final 
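DoubleRounding above snaps a value to the start of its span bucket by dividing by the interval, truncating to an integer, and multiplying back; LongRounding relies on the same idea with integer division. A short numeric illustration, values chosen arbitrarily::

    class SpanRoundingSketch {
      public static void main(String[] args) {
        long longInterval = 10L;
        long value = 37L;
        // Integer division drops the remainder: 37 / 10 = 3, then 3 * 10 = 30 (start of the bucket).
        System.out.println(value / longInterval * longInterval); // 30

        double doubleInterval = 2.5;
        double v = 6.2;
        // (int) (6.2 / 2.5) = 2, then 2 * 2.5 = 5.0, matching the expression in DoubleRounding.round.
        System.out.println(Double.valueOf(v / doubleInterval).intValue() * doubleInterval); // 5.0
      }
    }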
String name; protected final boolean isMillisBased; protected final long ratio; abstract long round(long utcMillis, int interval); - /** - * Resolve the date time unit. - */ + /** Resolve the date time unit. */ public static Rounding.DateTimeUnit resolve(String name) { switch (name) { case "M": @@ -283,5 +272,4 @@ public static Rounding.DateTimeUnit resolve(String name) { } } } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java index 105ad5ed32..5542d0f0e4 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTable.java @@ -19,12 +19,9 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; - /** - * Table implementation to handle show datasources command. - * Since datasource information is not tied to any storage engine, this info - * is handled via DataSource Table. - * + * Table implementation to handle show datasources command. Since datasource information is not tied + * to any storage engine, this info is handled via DataSource Table. */ @RequiredArgsConstructor @EqualsAndHashCode @@ -44,8 +41,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public static class DataSourceTableDefaultImplementor - extends DefaultImplementor { + public static class DataSourceTableDefaultImplementor extends DefaultImplementor { private final DataSourceService dataSourceService; @@ -54,5 +50,4 @@ public PhysicalPlan visitRelation(LogicalRelation node, Object context) { return new DataSourceTableScan(dataSourceService); } } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java index 93e65054b5..bc92df7d16 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java @@ -22,11 +22,9 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * This class handles table scan of data source table. - * Right now these are derived from dataSourceService thorough static fields. - * In future this might scan data from underlying datastore if we start - * persisting datasource info somewhere. - * + * This class handles table scan of data source table. Right now these are derived from + * dataSourceService thorough static fields. In future this might scan data from underlying + * datastore if we start persisting datasource info somewhere. 
*/ public class DataSourceTableScan extends TableScanOperator { @@ -47,15 +45,16 @@ public String explain() { @Override public void open() { List exprValues = new ArrayList<>(); - Set dataSourceMetadataSet - = dataSourceService.getDataSourceMetadata(true); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(true); for (DataSourceMetadata dataSourceMetadata : dataSourceMetadataSet) { exprValues.add( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "DATASOURCE_NAME", - ExprValueUtils.stringValue(dataSourceMetadata.getName()), - "CONNECTOR_TYPE", - ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "DATASOURCE_NAME", + ExprValueUtils.stringValue(dataSourceMetadata.getName()), + "CONNECTOR_TYPE", + ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); } iterator = exprValues.iterator(); } @@ -69,5 +68,4 @@ public boolean hasNext() { public ExprValue next() { return iterator.next(); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java index dd959d9b56..469305a15d 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableSchema.java @@ -13,19 +13,16 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** - * Definition of the data source table schema. - */ +/** Definition of the data source table schema. */ @Getter @RequiredArgsConstructor public enum DataSourceTableSchema { - - DATASOURCE_TABLE_SCHEMA(new LinkedHashMap<>() { - { - put("DATASOURCE_NAME", STRING); - put("CONNECTOR_TYPE", STRING); - } - } - ); + DATASOURCE_TABLE_SCHEMA( + new LinkedHashMap<>() { + { + put("DATASOURCE_NAME", STRING); + put("CONNECTOR_TYPE", STRING); + } + }); private final Map mapping; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java b/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java index 18eb10f19d..87ff048531 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/StreamContext.java @@ -8,8 +8,8 @@ import lombok.Data; /** - * Stream context required by stream processing components and can be - * stored and restored between executions. + * Stream context required by stream processing components and can be stored and restored between + * executions. */ @Data public class StreamContext { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java index 63d6a5b163..49a91dd9cc 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/BoundedOutOfOrderWatermarkGenerator.java @@ -7,9 +7,7 @@ import lombok.RequiredArgsConstructor; -/** - * Watermark generator that generates watermark with bounded out-of-order delay. - */ +/** Watermark generator that generates watermark with bounded out-of-order delay. 
*/ @RequiredArgsConstructor public class BoundedOutOfOrderWatermarkGenerator implements WatermarkGenerator { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java index 4f4c9a8a00..e4a44e5169 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/watermark/WatermarkGenerator.java @@ -6,8 +6,8 @@ package org.opensearch.sql.planner.streaming.watermark; /** - * A watermark generator generates watermark timestamp based on some strategy which is defined - * in implementation class. + * A watermark generator generates watermark timestamp based on some strategy which is defined in + * implementation class. */ public interface WatermarkGenerator { @@ -18,5 +18,4 @@ public interface WatermarkGenerator { * @return watermark timestamp in millisecond */ long generate(long timestamp); - } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java index 2a85ea391c..3d5b180346 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/Window.java @@ -7,9 +7,7 @@ import lombok.Data; -/** - * A time window is a window of time interval with inclusive start time and exclusive end time. - */ +/** A time window is a window of time interval with inclusive start time and exclusive end time. */ @Data public class Window { @@ -19,9 +17,7 @@ public class Window { /** End timestamp (exclusive) of the time window. */ private final long endTime; - /** - * Return the maximum timestamp (inclusive) of the window. - */ + /** Return the maximum timestamp (inclusive) of the window. */ public long maxTimestamp() { return endTime - 1; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java index f0f47fd575..1b1f12a573 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/SlidingWindowAssigner.java @@ -12,8 +12,8 @@ import org.opensearch.sql.utils.DateTimeUtils; /** - * A sliding window assigner assigns multiple overlapped window per event timestamp. - * The overlap size is determined by the given slide interval. + * A sliding window assigner assigns multiple overlapped window per event timestamp. The overlap + * size is determined by the given slide interval. */ public class SlidingWindowAssigner implements WindowAssigner { @@ -27,13 +27,13 @@ public class SlidingWindowAssigner implements WindowAssigner { * Create sliding window assigner with the given window and slide size in millisecond. 
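The bounded out-of-order generator above trails the largest event timestamp seen so far by a fixed delay, so events arriving late within that delay still land before the watermark passes them. A hedged sketch of the strategy; the class's real fields are not visible in this hunk, so the names here are assumptions::

    /** Watermark = max event timestamp observed so far minus the allowed out-of-order delay. */
    class BoundedWatermarkSketch {
      private final long maxOutOfOrderAllowed;
      private long maxTimestampSeen = Long.MIN_VALUE;

      BoundedWatermarkSketch(long maxOutOfOrderAllowed) {
        this.maxOutOfOrderAllowed = maxOutOfOrderAllowed;
      }

      long generate(long timestamp) {
        maxTimestampSeen = Math.max(maxTimestampSeen, timestamp);
        return maxTimestampSeen - maxOutOfOrderAllowed;
      }

      public static void main(String[] args) {
        BoundedWatermarkSketch gen = new BoundedWatermarkSketch(1_000L);
        System.out.println(gen.generate(5_000L)); // 4000
        System.out.println(gen.generate(3_000L)); // still 4000; a late event never moves the watermark back
      }
    }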
* * @param windowSize window size in millisecond - * @param slideSize slide size in millisecond + * @param slideSize slide size in millisecond */ public SlidingWindowAssigner(long windowSize, long slideSize) { - Preconditions.checkArgument(windowSize > 0, - "Window size [%s] must be positive number", windowSize); - Preconditions.checkArgument(slideSize > 0, - "Slide size [%s] must be positive number", slideSize); + Preconditions.checkArgument( + windowSize > 0, "Window size [%s] must be positive number", windowSize); + Preconditions.checkArgument( + slideSize > 0, "Slide size [%s] must be positive number", slideSize); this.windowSize = windowSize; this.slideSize = slideSize; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java index 192bb6c429..2591689a35 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/TumblingWindowAssigner.java @@ -11,9 +11,7 @@ import org.opensearch.sql.planner.streaming.windowing.Window; import org.opensearch.sql.utils.DateTimeUtils; -/** - * A tumbling window assigner assigns a single window per event timestamp without overlap. - */ +/** A tumbling window assigner assigns a single window per event timestamp without overlap. */ public class TumblingWindowAssigner implements WindowAssigner { /** Window size in millisecond. */ @@ -25,8 +23,8 @@ public class TumblingWindowAssigner implements WindowAssigner { * @param windowSize window size in millisecond */ public TumblingWindowAssigner(long windowSize) { - Preconditions.checkArgument(windowSize > 0, - "Window size [%s] must be positive number", windowSize); + Preconditions.checkArgument( + windowSize > 0, "Window size [%s] must be positive number", windowSize); this.windowSize = windowSize; } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java index dac882c5ff..fd615c2d5e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/assigner/WindowAssigner.java @@ -9,16 +9,16 @@ import org.opensearch.sql.planner.streaming.windowing.Window; /** - * A window assigner assigns zero or more window to an event timestamp - * based on different windowing approach. + * A window assigner assigns zero or more window to an event timestamp based on different windowing + * approach. */ public interface WindowAssigner { /** * Return window(s) assigned to the timestamp. 
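Tumbling and sliding assignment above differ only in how many windows cover a timestamp: exactly one for tumbling, one per slide step for sliding. A small arithmetic sketch (sizes are arbitrary; the real assigners also align window starts through DateTimeUtils)::

    class WindowAssignSketch {
      public static void main(String[] args) {
        long ts = 7_500L;

        // Tumbling, size 5000: the single window [5000, 10000).
        long tumblingStart = ts - ts % 5_000L;
        System.out.println("[" + tumblingStart + ", " + (tumblingStart + 5_000L) + ")");

        // Sliding, size 5000 and slide 2000: every slide-aligned window whose span still covers ts.
        long windowSize = 5_000L;
        long slideSize = 2_000L;
        for (long start = ts - ts % slideSize; start > ts - windowSize; start -= slideSize) {
          System.out.println("[" + start + ", " + (start + windowSize) + ")");
        }
        // Prints [6000, 11000) and [4000, 9000); 7500 falls inside both.
      }
    }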
+ * * @param timestamp given event timestamp * @return windows assigned */ List assign(long timestamp); - } diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java index 1801880961..f614ce847e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/AfterWatermarkWindowTrigger.java @@ -11,8 +11,8 @@ /** * After watermark window trigger fires window state output once a window is below watermark. - * Precisely speaking, after watermark means the window boundary (max timestamp) is equal to - * or less than the current watermark timestamp. + * Precisely speaking, after watermark means the window boundary (max timestamp) is equal to or less + * than the current watermark timestamp. */ @RequiredArgsConstructor public class AfterWatermarkWindowTrigger implements WindowTrigger { diff --git a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java index 465f0aa9eb..30dba22725 100644 --- a/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java +++ b/core/src/main/java/org/opensearch/sql/planner/streaming/windowing/trigger/TriggerResult.java @@ -8,9 +8,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Result determined by a trigger for what should happen to the window. - */ +/** Result determined by a trigger for what should happen to the window. */ @Getter @RequiredArgsConstructor public enum TriggerResult { diff --git a/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java b/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java index 8512eddbe3..69d902c1d7 100644 --- a/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java +++ b/core/src/main/java/org/opensearch/sql/storage/DataSourceFactory.java @@ -19,14 +19,9 @@ * {@link DataSourceFactory}. */ public interface DataSourceFactory { - /** - * Get {@link DataSourceType}. - */ + /** Get {@link DataSourceType}. */ DataSourceType getDataSourceType(); - /** - * Create {@link DataSource}. - */ + /** Create {@link DataSource}. */ DataSource createDataSource(DataSourceMetadata metadata); - } diff --git a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java index ffcc0911de..c3b54beaaa 100644 --- a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java +++ b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import java.util.Collection; @@ -11,14 +10,10 @@ import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.expression.function.FunctionResolver; -/** - * Storage engine for different storage to provide data access API implementation. - */ +/** Storage engine for different storage to provide data access API implementation. */ public interface StorageEngine { - /** - * Get {@link Table} from storage engine. - */ + /** Get {@link Table} from storage engine. 
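The after-watermark trigger documented above fires once the window's maximum timestamp is at or below the current watermark, that is, once no more on-time events can arrive for it. The decision reduces to one comparison; a tiny sketch with invented numbers::

    class TriggerSketch {
      /** FIRE when the window can no longer receive on-time events, otherwise CONTINUE. */
      static boolean shouldFire(long windowMaxTimestamp, long currentWatermark) {
        return windowMaxTimestamp <= currentWatermark;
      }

      public static void main(String[] args) {
        long watermark = 10_000L;
        System.out.println(shouldFire(9_999L, watermark));  // true: window [5000, 10000) is complete
        System.out.println(shouldFire(14_999L, watermark)); // false: window [10000, 15000) still open
      }
    }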
*/ Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableName); /** diff --git a/core/src/main/java/org/opensearch/sql/storage/Table.java b/core/src/main/java/org/opensearch/sql/storage/Table.java index fc1def5a2e..33dbd7d66d 100644 --- a/core/src/main/java/org/opensearch/sql/storage/Table.java +++ b/core/src/main/java/org/opensearch/sql/storage/Table.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import java.util.Map; @@ -15,9 +14,7 @@ import org.opensearch.sql.storage.read.TableScanBuilder; import org.opensearch.sql.storage.write.TableWriteBuilder; -/** - * Table. - */ +/** Table. */ public interface Table { /** @@ -38,14 +35,10 @@ default void create(Map schema) { throw new UnsupportedOperationException("Unsupported Operation"); } - /** - * Get the {@link ExprType} for each field in the table. - */ + /** Get the {@link ExprType} for each field in the table. */ Map getFieldTypes(); - /** - * Get the {@link ExprType} for each meta-field (reserved fields) in the table. - */ + /** Get the {@link ExprType} for each meta-field (reserved fields) in the table. */ default Map getReservedFieldTypes() { return Map.of(); } @@ -61,8 +54,8 @@ default Map getReservedFieldTypes() { PhysicalPlan implement(LogicalPlan plan); /** - * Optimize the {@link LogicalPlan} by storage engine rule. - * The default optimize solution is no optimization. + * Optimize the {@link LogicalPlan} by storage engine rule. The default optimize solution is no + * optimization. * * @param plan logical plan. * @return logical plan. @@ -89,15 +82,11 @@ default TableScanBuilder createScanBuilder() { * @return table write builder */ default TableWriteBuilder createWriteBuilder(LogicalWrite plan) { - throw new UnsupportedOperationException( - "Write operation is not supported on current table"); + throw new UnsupportedOperationException("Write operation is not supported on current table"); } - /** - * Translate {@link Table} to {@link StreamingSource} if possible. - */ + /** Translate {@link Table} to {@link StreamingSource} if possible. */ default StreamingSource asStreamingSource() { throw new UnsupportedOperationException(); } - } diff --git a/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java b/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java index 1b8e33bc4f..130516b3ef 100644 --- a/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java +++ b/core/src/main/java/org/opensearch/sql/storage/TableScanOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage; import java.util.Collections; @@ -12,8 +11,8 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * Abstract table scan class for different storage to implement. - * This is also to avoid "polluting" physical plan visitor by concrete table scan implementation. + * Abstract table scan class for different storage to implement. This is also to avoid "polluting" + * physical plan visitor by concrete table scan implementation. 
*/ public abstract class TableScanOperator extends PhysicalPlan { diff --git a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java index 51a0348116..2487c651ad 100644 --- a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java +++ b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/BindingTuple.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage.bindingtuple; import org.opensearch.sql.data.model.ExprMissingValue; @@ -14,20 +13,19 @@ import org.opensearch.sql.expression.env.Environment; /** - * BindingTuple represents the a relationship between bindingName and ExprValue. - * e.g. The operation output column name is bindingName, the value is the ExprValue. + * BindingTuple represents the a relationship between bindingName and ExprValue. e.g. The operation + * output column name is bindingName, the value is the ExprValue. */ public abstract class BindingTuple implements Environment { - public static BindingTuple EMPTY = new BindingTuple() { - @Override - public ExprValue resolve(ReferenceExpression ref) { - return ExprMissingValue.of(); - } - }; + public static BindingTuple EMPTY = + new BindingTuple() { + @Override + public ExprValue resolve(ReferenceExpression ref) { + return ExprMissingValue.of(); + } + }; - /** - * Resolve {@link Expression} in the BindingTuple environment. - */ + /** Resolve {@link Expression} in the BindingTuple environment. */ @Override public ExprValue resolve(Expression var) { if (var instanceof ReferenceExpression) { @@ -37,8 +35,6 @@ public ExprValue resolve(Expression var) { } } - /** - * Resolve the {@link ReferenceExpression} in BindingTuple context. - */ + /** Resolve the {@link ReferenceExpression} in BindingTuple context. */ public abstract ExprValue resolve(ReferenceExpression ref); } diff --git a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java index 4589731442..d43a3f2a1b 100644 --- a/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java +++ b/core/src/main/java/org/opensearch/sql/storage/bindingtuple/LazyBindingTuple.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.storage.bindingtuple; import java.util.function.Supplier; @@ -12,9 +11,7 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Lazy Implementation of {@link BindingTuple}. - */ +/** Lazy Implementation of {@link BindingTuple}. */ @RequiredArgsConstructor public class LazyBindingTuple extends BindingTuple { private final Supplier lazyBinding; diff --git a/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java b/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java index f0158c52b8..b2da0b67a4 100644 --- a/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java +++ b/core/src/main/java/org/opensearch/sql/storage/read/TableScanBuilder.java @@ -19,16 +19,14 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * A TableScanBuilder represents transition state between logical planning and physical planning - * for table scan operator. 
The concrete implementation class gets involved in the logical - * optimization through this abstraction and thus get the chance to handle push down optimization - * without intruding core engine. + * A TableScanBuilder represents transition state between logical planning and physical planning for + * table scan operator. The concrete implementation class gets involved in the logical optimization + * through this abstraction and thus get the chance to handle push down optimization without + * intruding core engine. */ public abstract class TableScanBuilder extends LogicalPlan { - /** - * Construct and initialize children to empty list. - */ + /** Construct and initialize children to empty list. */ protected TableScanBuilder() { super(Collections.emptyList()); } @@ -41,8 +39,8 @@ protected TableScanBuilder() { public abstract TableScanOperator build(); /** - * Can a given filter operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given filter operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param filter logical filter operator * @return true if pushed down, otherwise false @@ -52,8 +50,8 @@ public boolean pushDownFilter(LogicalFilter filter) { } /** - * Can a given aggregate operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given aggregate operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param aggregation logical aggregate operator * @return true if pushed down, otherwise false @@ -63,8 +61,8 @@ public boolean pushDownAggregation(LogicalAggregation aggregation) { } /** - * Can a given sort operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given sort operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param sort logical sort operator * @return true if pushed down, otherwise false @@ -74,8 +72,8 @@ public boolean pushDownSort(LogicalSort sort) { } /** - * Can a given limit operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given limit operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param limit logical limit operator * @return true if pushed down, otherwise false @@ -85,8 +83,8 @@ public boolean pushDownLimit(LogicalLimit limit) { } /** - * Can a given project operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given project operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param project logical project operator * @return true if pushed down, otherwise false @@ -96,8 +94,8 @@ public boolean pushDownProject(LogicalProject project) { } /** - * Can a given highlight operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given highlight operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. 
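All of the pushDown* hooks above default to rejecting the optimization; a concrete storage engine opts in by overriding only the ones it can honour. A hedged sketch of such an override; the class name and the stored filter field are invented, and the imports assume the package layout shown in this diff::

    import org.opensearch.sql.planner.logical.LogicalFilter;
    import org.opensearch.sql.storage.TableScanOperator;
    import org.opensearch.sql.storage.read.TableScanBuilder;

    /** Illustrative builder that accepts filter push-down and leaves every other hook at its default. */
    class FilterOnlyScanBuilder extends TableScanBuilder {

      private LogicalFilter pushedFilter; // remembered so build() could translate it later

      @Override
      public boolean pushDownFilter(LogicalFilter filter) {
        this.pushedFilter = filter;
        return true; // the optimizer drops the filter node; this scan now owns the predicate
      }

      @Override
      public TableScanOperator build() {
        // A real engine would translate pushedFilter into its native query here.
        throw new UnsupportedOperationException("illustration only");
      }
    }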
* * @param highlight logical highlight operator * @return true if pushed down, otherwise false @@ -107,8 +105,8 @@ public boolean pushDownHighlight(LogicalHighlight highlight) { } /** - * Can a given nested operator be pushed down to table scan builder. Assume no such support - * by default unless subclass override this. + * Can a given nested operator be pushed down to table scan builder. Assume no such support by + * default unless subclass override this. * * @param nested logical nested operator * @return true if pushed down, otherwise false diff --git a/core/src/main/java/org/opensearch/sql/storage/split/Split.java b/core/src/main/java/org/opensearch/sql/storage/split/Split.java index e9e0c6fcc1..1cb0ca57ce 100644 --- a/core/src/main/java/org/opensearch/sql/storage/split/Split.java +++ b/core/src/main/java/org/opensearch/sql/storage/split/Split.java @@ -8,13 +8,14 @@ import org.opensearch.sql.storage.StorageEngine; /** - * Split is a sections of a data set. Each {@link StorageEngine} should have specific - * implementation of Split. + * Split is a sections of a data set. Each {@link StorageEngine} should have specific implementation + * of Split. */ public interface Split { /** * Get the split id. + * * @return split id. */ String getSplitId(); diff --git a/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java b/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java index 54dfa5d557..af18916f71 100644 --- a/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java +++ b/core/src/main/java/org/opensearch/sql/storage/write/TableWriteBuilder.java @@ -18,9 +18,7 @@ */ public abstract class TableWriteBuilder extends LogicalPlan { - /** - * Construct table write builder with child node. - */ + /** Construct table write builder with child node. */ public TableWriteBuilder(LogicalPlan child) { super(Collections.singletonList(child)); } @@ -28,7 +26,7 @@ public TableWriteBuilder(LogicalPlan child) { /** * Build table write operator with given child node. * - * @param child child operator node + * @param child child operator node * @return table write operator */ public abstract TableWriteOperator build(PhysicalPlan child); diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java index 39726bc975..18e6541514 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeFormatters.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import static java.time.temporal.ChronoField.DAY_OF_MONTH; @@ -22,26 +21,23 @@ import java.util.Locale; import lombok.experimental.UtilityClass; -/** - * DateTimeFormatter. - * Reference org.opensearch.common.time.DateFormatters. - */ +/** DateTimeFormatter. Reference org.opensearch.common.time.DateFormatters. */ @UtilityClass public class DateTimeFormatters { - //Length of a date formatted as YYYYMMDD. + // Length of a date formatted as YYYYMMDD. public static final int FULL_DATE_LENGTH = 8; - //Length of a date formatted as YYMMDD. + // Length of a date formatted as YYMMDD. public static final int SHORT_DATE_LENGTH = 6; - //Length of a date formatted as YMMDD. + // Length of a date formatted as YMMDD. public static final int SINGLE_DIGIT_YEAR_DATE_LENGTH = 5; - //Length of a date formatted as MMDD. + // Length of a date formatted as MMDD. 
public static final int NO_YEAR_DATE_LENGTH = 4; - //Length of a date formatted as MDD. + // Length of a date formatted as MDD. public static final int SINGLE_DIGIT_MONTH_DATE_LENGTH = 3; private static final int MIN_FRACTION_SECONDS = 0; @@ -110,8 +106,8 @@ public class DateTimeFormatters { .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); - public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = DateTimeFormatter - .ofPattern("yyyy-MM-dd HH:mm:ss"); + public static final DateTimeFormatter SQL_LITERAL_DATE_TIME_FORMAT = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); public static final DateTimeFormatter DATE_TIME_FORMATTER = new DateTimeFormatterBuilder() @@ -120,9 +116,7 @@ public class DateTimeFormatters { .appendOptional(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .toFormatter(); - /** - * todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. - */ + /** todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. */ public static final DateTimeFormatter DATE_TIME_FORMATTER_WITHOUT_NANO = SQL_LITERAL_DATE_TIME_FORMAT; @@ -130,10 +124,7 @@ public class DateTimeFormatters { new DateTimeFormatterBuilder() .appendPattern("uuuu-MM-dd HH:mm:ss") .appendFraction( - ChronoField.NANO_OF_SECOND, - MIN_FRACTION_SECONDS, - MAX_FRACTION_SECONDS, - true) + ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); @@ -141,10 +132,7 @@ public class DateTimeFormatters { new DateTimeFormatterBuilder() .appendPattern("[uuuu-MM-dd HH:mm:ss][uuuu-MM-dd HH:mm][HH:mm:ss][HH:mm][uuuu-MM-dd]") .appendFraction( - ChronoField.NANO_OF_SECOND, - MIN_FRACTION_SECONDS, - MAX_FRACTION_SECONDS, - true) + ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) .toFormatter(Locale.ROOT) .withResolverStyle(ResolverStyle.STRICT); @@ -199,7 +187,7 @@ public class DateTimeFormatters { // YYYYMMDDhhmmss public static final DateTimeFormatter DATE_TIME_FORMATTER_LONG_YEAR = new DateTimeFormatterBuilder() - .appendValue(YEAR,4) + .appendValue(YEAR, 4) .appendPattern("MMddHHmmss") .toFormatter() .withResolverStyle(ResolverStyle.STRICT); @@ -214,11 +202,8 @@ public class DateTimeFormatters { // uuuu-MM-dd HH:mm:ss[xxx] public static final DateTimeFormatter DATE_TIME_FORMATTER_WITH_TZ = new DateTimeFormatterBuilder() - .appendPattern("uuuu-MM-dd HH:mm:ss[xxx]") - .appendFraction( - ChronoField.NANO_OF_SECOND, - MIN_FRACTION_SECONDS, - MAX_FRACTION_SECONDS, - true) - .toFormatter(); + .appendPattern("uuuu-MM-dd HH:mm:ss[xxx]") + .appendFraction( + ChronoField.NANO_OF_SECOND, MIN_FRACTION_SECONDS, MAX_FRACTION_SECONDS, true) + .toFormatter(); } diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java index 74fdf42571..593b4c4471 100644 --- a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java @@ -21,9 +21,9 @@ public class DateTimeUtils { /** * Util method to round the date/time with given unit. 
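The DateTimeFormatters constants touched above are ordinary java.time formatters, so their behavior is easy to confirm in isolation. A small usage sketch using two constants whose definitions are visible in the hunk (SQL_LITERAL_DATE_TIME_FORMAT and the compact YYYYMMDDhhmmss formatter DATE_TIME_FORMATTER_LONG_YEAR); the package path follows the diff header::

    import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_LONG_YEAR;
    import static org.opensearch.sql.utils.DateTimeFormatters.SQL_LITERAL_DATE_TIME_FORMAT;

    import java.time.LocalDateTime;

    public class DateTimeFormattersDemo {
      public static void main(String[] args) {
        // "yyyy-MM-dd HH:mm:ss", the SQL timestamp literal form.
        LocalDateTime fromLiteral =
            LocalDateTime.parse("2023-07-01 12:34:56", SQL_LITERAL_DATE_TIME_FORMAT);

        // Compact YYYYMMDDhhmmss input handled by DATE_TIME_FORMATTER_LONG_YEAR.
        LocalDateTime fromCompact =
            LocalDateTime.parse("20230701123456", DATE_TIME_FORMATTER_LONG_YEAR);

        // Both parse to 2023-07-01T12:34:56.
        System.out.println(fromLiteral + " / " + fromCompact);
      }
    }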
* - * @param utcMillis Date/time value to round, given in utc millis - * @param unitMillis Date/time interval unit in utc millis - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param unitMillis Date/time interval unit in utc millis + * @return Rounded date/time value in utc millis */ public static long roundFloor(long utcMillis, long unitMillis) { return utcMillis - utcMillis % unitMillis; @@ -32,9 +32,9 @@ public static long roundFloor(long utcMillis, long unitMillis) { /** * Util method to round the date/time in week(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of weeks as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of weeks as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundWeek(long utcMillis, int interval) { return roundFloor(utcMillis + 259200000L, 604800000L * interval) - 259200000L; @@ -43,16 +43,18 @@ public static long roundWeek(long utcMillis, int interval) { /** * Util method to round the date/time in month(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of months as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of months as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundMonth(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID) - .plusMonths(interval); - long monthDiff = (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime - .getMonthValue() - initDateTime.getMonthValue(); + ZonedDateTime zonedDateTime = + Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval); + long monthDiff = + (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + + zonedDateTime.getMonthValue() + - initDateTime.getMonthValue(); long monthToAdd = (monthDiff / interval - 1) * interval; return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); } @@ -60,16 +62,18 @@ public static long roundMonth(long utcMillis, int interval) { /** * Util method to round the date/time in quarter(s). 
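The DateTimeUtils rounding helpers above are pure epoch-millisecond arithmetic, which makes them straightforward to sanity-check. The 259200000L constant in roundWeek is three days: shifting by it before and after the floor moves the origin from Thursday 1970-01-01 to Monday 1969-12-29, so weekly buckets start on Mondays. A short sketch using only the roundFloor and roundWeek signatures shown in the hunk::

    import java.time.Instant;
    import org.opensearch.sql.utils.DateTimeUtils;

    public class RoundingDemo {
      public static void main(String[] args) {
        long hourMillis = 3_600_000L;
        long ts = Instant.parse("2023-07-01T12:34:56Z").toEpochMilli();

        // Floor to the hour: prints 2023-07-01T12:00:00Z.
        System.out.println(Instant.ofEpochMilli(DateTimeUtils.roundFloor(ts, hourMillis)));

        // Weekly buckets are Monday-aligned: 2023-07-01 is a Saturday,
        // so a 1-week floor prints Monday 2023-06-26T00:00:00Z.
        System.out.println(Instant.ofEpochMilli(DateTimeUtils.roundWeek(ts, 1)));
      }
    }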
* - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of quarters as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of quarters as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundQuarter(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); - ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID) - .plusMonths(interval * 3L); - long monthDiff = ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime - .getMonthValue() - initDateTime.getMonthValue()); + ZonedDateTime zonedDateTime = + Instant.ofEpochMilli(utcMillis).atZone(UTC_ZONE_ID).plusMonths(interval * 3L); + long monthDiff = + ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + + zonedDateTime.getMonthValue() + - initDateTime.getMonthValue()); long monthToAdd = (monthDiff / (interval * 3L) - 1) * interval * 3; return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); } @@ -77,9 +81,9 @@ public static long roundQuarter(long utcMillis, int interval) { /** * Util method to round the date/time in year(s). * - * @param utcMillis Date/time value to round, given in utc millis - * @param interval Number of years as the rounding interval - * @return Rounded date/time value in utc millis + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of years as the rounding interval + * @return Rounded date/time value in utc millis */ public static long roundYear(long utcMillis, int interval) { ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, UTC_ZONE_ID); @@ -124,29 +128,25 @@ public Boolean isValidMySqlTimeZoneId(ZoneId zone) { ZonedDateTime passedTzValidator = defaultDateTime.withZoneSameInstant(zone).withZoneSameLocal(defaultTz); - return (passedTzValidator.isBefore(maxTzValidator) - || passedTzValidator.isEqual(maxTzValidator)) - && (passedTzValidator.isAfter(minTzValidator) - || passedTzValidator.isEqual(minTzValidator)); + return (passedTzValidator.isBefore(maxTzValidator) || passedTzValidator.isEqual(maxTzValidator)) + && (passedTzValidator.isAfter(minTzValidator) || passedTzValidator.isEqual(minTzValidator)); } /** - * Extracts LocalDateTime from a datetime ExprValue. - * Uses `FunctionProperties` for `ExprTimeValue`. + * Extracts LocalDateTime from a datetime ExprValue. Uses `FunctionProperties` for + * `ExprTimeValue`. */ - public static LocalDateTime extractDateTime(ExprValue value, - FunctionProperties functionProperties) { + public static LocalDateTime extractDateTime( + ExprValue value, FunctionProperties functionProperties) { return value instanceof ExprTimeValue ? ((ExprTimeValue) value).datetimeValue(functionProperties) : value.datetimeValue(); } /** - * Extracts LocalDate from a datetime ExprValue. - * Uses `FunctionProperties` for `ExprTimeValue`. + * Extracts LocalDate from a datetime ExprValue. Uses `FunctionProperties` for `ExprTimeValue`. */ - public static LocalDate extractDate(ExprValue value, - FunctionProperties functionProperties) { + public static LocalDate extractDate(ExprValue value, FunctionProperties functionProperties) { return value instanceof ExprTimeValue ? 
((ExprTimeValue) value).dateValue(functionProperties) : value.dateValue(); diff --git a/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java b/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java index e8324af5f4..f04bf3748f 100644 --- a/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/ExpressionUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import java.util.List; @@ -11,17 +10,13 @@ import lombok.experimental.UtilityClass; import org.opensearch.sql.expression.Expression; -/** - * Utils for {@link Expression}. - */ +/** Utils for {@link Expression}. */ @UtilityClass public class ExpressionUtils { public static String PATH_SEP = "."; - /** - * Format the list of {@link Expression}. - */ + /** Format the list of {@link Expression}. */ public static String format(List expressionList) { return expressionList.stream().map(Expression::toString).collect(Collectors.joining(",")); } diff --git a/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java b/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java index f4ece6a190..d9ae0b4258 100644 --- a/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/OperatorUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import java.util.regex.Pattern; @@ -15,9 +14,10 @@ @UtilityClass public class OperatorUtils { /** - * Wildcard pattern matcher util. - * Percent (%) character for wildcard, + * Wildcard pattern matcher util.
+ * Percent (%) character for wildcard,
* Underscore (_) character for a single character match. + * * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. */ @@ -30,12 +30,13 @@ public static ExprBooleanValue matches(ExprValue text, ExprValue pattern) { /** * Checks if text matches regular expression pattern. + * * @param pattern string pattern to match. * @return if text matches pattern returns true; else return false. */ public static ExprIntegerValue matchesRegexp(ExprValue text, ExprValue pattern) { - return new ExprIntegerValue(Pattern.compile(pattern.stringValue()).matcher(text.stringValue()) - .matches() ? 1 : 0); + return new ExprIntegerValue( + Pattern.compile(pattern.stringValue()).matcher(text.stringValue()).matches() ? 1 : 0); } private static final char DEFAULT_ESCAPE = '\\'; diff --git a/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java b/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java index 6c640482d0..e659cfdf50 100644 --- a/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/ParseUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import com.google.common.collect.ImmutableMap; @@ -18,30 +17,27 @@ import org.opensearch.sql.expression.parse.PatternsExpression; import org.opensearch.sql.expression.parse.RegexExpression; -/** - * Utils for {@link ParseExpression}. - */ +/** Utils for {@link ParseExpression}. */ @UtilityClass public class ParseUtils { private static final String NEW_FIELD_KEY = "new_field"; - private static final Map FACTORY_MAP = ImmutableMap.of( - ParseMethod.REGEX, RegexExpression::new, - ParseMethod.GROK, GrokExpression::new, - ParseMethod.PATTERNS, PatternsExpression::new - ); + private static final Map FACTORY_MAP = + ImmutableMap.of( + ParseMethod.REGEX, RegexExpression::new, + ParseMethod.GROK, GrokExpression::new, + ParseMethod.PATTERNS, PatternsExpression::new); /** * Construct corresponding ParseExpression by {@link ParseMethod}. * * @param parseMethod method used to parse * @param sourceField source text field - * @param pattern pattern used for parsing - * @param identifier derived field + * @param pattern pattern used for parsing + * @param identifier derived field * @return {@link ParseExpression} */ - public static ParseExpression createParseExpression(ParseMethod parseMethod, - Expression sourceField, Expression pattern, - Expression identifier) { + public static ParseExpression createParseExpression( + ParseMethod parseMethod, Expression sourceField, Expression pattern, Expression identifier) { return FACTORY_MAP.get(parseMethod).initialize(sourceField, pattern, identifier); } @@ -51,21 +47,23 @@ public static ParseExpression createParseExpression(ParseMethod parseMethod, * @param pattern pattern used for parsing * @return list of names of the derived fields */ - public static List getNamedGroupCandidates(ParseMethod parseMethod, String pattern, - Map arguments) { + public static List getNamedGroupCandidates( + ParseMethod parseMethod, String pattern, Map arguments) { switch (parseMethod) { case REGEX: return RegexExpression.getNamedGroupCandidates(pattern); case GROK: return GrokExpression.getNamedGroupCandidates(pattern); default: - return PatternsExpression.getNamedGroupCandidates(arguments.containsKey(NEW_FIELD_KEY) - ? (String) arguments.get(NEW_FIELD_KEY).getValue() : null); + return PatternsExpression.getNamedGroupCandidates( + arguments.containsKey(NEW_FIELD_KEY) + ? 
(String) arguments.get(NEW_FIELD_KEY).getValue() + : null); } } private interface ParseExpressionFactory { - ParseExpression initialize(Expression sourceField, Expression expression, - Expression identifier); + ParseExpression initialize( + Expression sourceField, Expression expression, Expression identifier); } } diff --git a/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java b/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java index 5325ea371a..38d2753f6c 100644 --- a/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java +++ b/core/src/main/java/org/opensearch/sql/utils/SystemIndexUtils.java @@ -3,44 +3,32 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.utils; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.experimental.UtilityClass; -/** - * System Index Utils. - * Todo. Find the better name for this class. - */ +/** System Index Utils. Todo. Find the better name for this class. */ @UtilityClass public class SystemIndexUtils { public static final String TABLE_NAME_FOR_TABLES_INFO = "tables"; - /** - * The suffix of all the system tables. - */ + + /** The suffix of all the system tables. */ private static final String SYS_TABLES_SUFFIX = "ODFE_SYS_TABLE"; - /** - * The suffix of all the meta tables. - */ + /** The suffix of all the meta tables. */ private static final String SYS_META_SUFFIX = "META_" + SYS_TABLES_SUFFIX; - /** - * The suffix of all the table mappings. - */ - private static final String SYS_MAPPINGS_SUFFIX = "MAPPINGS_" + SYS_TABLES_SUFFIX; + /** The suffix of all the table mappings. */ + private static final String SYS_MAPPINGS_SUFFIX = "MAPPINGS_" + SYS_TABLES_SUFFIX; - /** - * The ALL.META_ODFE_SYS_TABLE contain all the table info. - */ + /** The ALL.META_ODFE_SYS_TABLE contain all the table info. */ public static final String TABLE_INFO = "ALL." + SYS_META_SUFFIX; public static final String DATASOURCES_TABLE_NAME = ".DATASOURCES"; - public static Boolean isSystemIndex(String indexName) { return indexName.endsWith(SYS_TABLES_SUFFIX); } @@ -62,8 +50,7 @@ public static String mappingTable(String indexName) { public static SystemTable systemTable(String indexName) { final int lastDot = indexName.lastIndexOf("."); String suffix = indexName.substring(lastDot + 1); - String tableName = indexName.substring(0, lastDot) - .replace("%", "*"); + String tableName = indexName.substring(0, lastDot).replace("%", "*"); if (suffix.equalsIgnoreCase(SYS_META_SUFFIX)) { return new SystemInfoTable(tableName); @@ -74,9 +61,7 @@ public static SystemTable systemTable(String indexName) { } } - /** - * System Table. - */ + /** System Table. */ public interface SystemTable { String getTableName(); @@ -90,9 +75,7 @@ default boolean isMetaInfoTable() { } } - /** - * System Info Table. - */ + /** System Info Table. */ @Getter @RequiredArgsConstructor public static class SystemInfoTable implements SystemTable { @@ -104,9 +87,7 @@ public boolean isSystemInfoTable() { } } - /** - * System Table. - */ + /** System Table. 
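The SystemIndexUtils naming scheme above is easiest to follow with concrete index names. A short sketch based on the constants and the suffix logic visible in the hunk; the branch that turns a MAPPINGS_ suffix into a MetaInfoTable sits outside the hunk and is assumed from context, so the expected values in the comments are illustrative rather than verified::

    import org.opensearch.sql.utils.SystemIndexUtils;
    import org.opensearch.sql.utils.SystemIndexUtils.SystemTable;

    public class SystemIndexDemo {
      public static void main(String[] args) {
        // Any index name ending in ODFE_SYS_TABLE is treated as a system index.
        System.out.println(SystemIndexUtils.isSystemIndex("accounts"));                // false
        System.out.println(SystemIndexUtils.isSystemIndex("ALL.META_ODFE_SYS_TABLE")); // true

        // TABLE_INFO ("ALL.META_ODFE_SYS_TABLE") resolves to the catalog-wide info table.
        SystemTable info = SystemIndexUtils.systemTable(SystemIndexUtils.TABLE_INFO);
        System.out.println(info.isSystemInfoTable() + " " + info.getTableName());      // true ALL

        // A per-index mappings table; '%' in the table-name part is rewritten to '*'.
        SystemTable mappings = SystemIndexUtils.systemTable("acc%.MAPPINGS_ODFE_SYS_TABLE");
        System.out.println(mappings.getTableName());                                   // acc*
      }
    }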
*/ @Getter @RequiredArgsConstructor public static class MetaInfoTable implements SystemTable { diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java index 0d643aa53f..b052fe47ce 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalysisContextTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java index 100cfd67af..2f4d6e8ada 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static java.util.Collections.emptyList; @@ -134,17 +133,13 @@ public void filter_relation_with_reserved_qualifiedName() { @Test public void filter_relation_with_invalid_qualifiedName_SemanticCheckException() { - UnresolvedPlan invalidFieldPlan = AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.equalTo( - AstDSL.qualifiedName("_invalid"), - AstDSL.stringLiteral("value")) - ); + UnresolvedPlan invalidFieldPlan = + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.equalTo(AstDSL.qualifiedName("_invalid"), AstDSL.stringLiteral("value"))); SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, - () -> analyze(invalidFieldPlan)); + assertThrows(SemanticCheckException.class, () -> analyze(invalidFieldPlan)); assertEquals( "can't resolve Symbol(namespace=FIELD_NAME, name=_invalid) in type env", exception.getMessage()); @@ -152,15 +147,13 @@ public void filter_relation_with_invalid_qualifiedName_SemanticCheckException() @Test public void filter_relation_with_invalid_qualifiedName_ExpressionEvaluationException() { - UnresolvedPlan typeMismatchPlan = AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.equalTo(AstDSL.qualifiedName("_test"), AstDSL.intLiteral(1)) - ); + UnresolvedPlan typeMismatchPlan = + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.equalTo(AstDSL.qualifiedName("_test"), AstDSL.intLiteral(1))); ExpressionEvaluationException exception = - assertThrows( - ExpressionEvaluationException.class, - () -> analyze(typeMismatchPlan)); + assertThrows(ExpressionEvaluationException.class, () -> analyze(typeMismatchPlan)); assertEquals( "= function expected {[BYTE,BYTE],[SHORT,SHORT],[INTEGER,INTEGER],[LONG,LONG]," + "[FLOAT,FLOAT],[DOUBLE,DOUBLE],[STRING,STRING],[BOOLEAN,BOOLEAN],[DATE,DATE]," @@ -265,8 +258,8 @@ public void filter_relation_with_non_existing_datasource_with_three_parts() { LogicalPlanDSL.relation("test.nonexisting_schema.http_total_requests", table), DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1)))), AstDSL.filter( - AstDSL.relation(AstDSL.qualifiedName("test", - "nonexisting_schema", "http_total_requests")), + AstDSL.relation( + AstDSL.qualifiedName("test", "nonexisting_schema", "http_total_requests")), AstDSL.equalTo(AstDSL.field("integer_value"), AstDSL.intLiteral(1)))); } @@ -283,73 +276,68 @@ public void filter_relation_with_multiple_tables() { @Test public void analyze_filter_visit_score_function() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - new ScoreFunction( 
- AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("boost", stringLiteral("3")) - ), AstDSL.doubleLiteral(1.0)) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + new ScoreFunction( + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("boost", stringLiteral("3"))), + AstDSL.doubleLiteral(1.0))); assertAnalyzeEqual( LogicalPlanDSL.filter( LogicalPlanDSL.relation("schema", table), DSL.match_phrase_prefix( DSL.namedArgument("field", "field_value1"), DSL.namedArgument("query", "search query"), - DSL.namedArgument("boost", "3.0") - ) - ), - unresolvedPlan - ); + DSL.namedArgument("boost", "3.0"))), + unresolvedPlan); LogicalPlan logicalPlan = analyze(unresolvedPlan); OpenSearchFunctions.OpenSearchFunction relevanceQuery = - (OpenSearchFunctions.OpenSearchFunction)((LogicalFilter) logicalPlan).getCondition(); + (OpenSearchFunctions.OpenSearchFunction) ((LogicalFilter) logicalPlan).getCondition(); assertEquals(true, relevanceQuery.isScoreTracked()); } @Test public void analyze_filter_visit_without_score_function() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("boost", stringLiteral("3")) - ) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("boost", stringLiteral("3")))); assertAnalyzeEqual( LogicalPlanDSL.filter( LogicalPlanDSL.relation("schema", table), DSL.match_phrase_prefix( DSL.namedArgument("field", "field_value1"), DSL.namedArgument("query", "search query"), - DSL.namedArgument("boost", "3") - ) - ), - unresolvedPlan - ); + DSL.namedArgument("boost", "3"))), + unresolvedPlan); LogicalPlan logicalPlan = analyze(unresolvedPlan); OpenSearchFunctions.OpenSearchFunction relevanceQuery = - (OpenSearchFunctions.OpenSearchFunction)((LogicalFilter) logicalPlan).getCondition(); + (OpenSearchFunctions.OpenSearchFunction) ((LogicalFilter) logicalPlan).getCondition(); assertEquals(false, relevanceQuery.isScoreTracked()); } @Test public void analyze_filter_visit_score_function_with_double_boost() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - new ScoreFunction( - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("slop", stringLiteral("3")) - ), new Literal(3.0, DataType.DOUBLE) - ) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + new ScoreFunction( + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("slop", stringLiteral("3"))), + new Literal(3.0, DataType.DOUBLE))); assertAnalyzeEqual( LogicalPlanDSL.filter( @@ -358,44 +346,36 @@ public void 
analyze_filter_visit_score_function_with_double_boost() { DSL.namedArgument("field", "field_value1"), DSL.namedArgument("query", "search query"), DSL.namedArgument("slop", "3"), - DSL.namedArgument("boost", "3.0") - ) - ), - unresolvedPlan - ); + DSL.namedArgument("boost", "3.0"))), + unresolvedPlan); LogicalPlan logicalPlan = analyze(unresolvedPlan); OpenSearchFunctions.OpenSearchFunction relevanceQuery = - (OpenSearchFunctions.OpenSearchFunction)((LogicalFilter) logicalPlan).getCondition(); + (OpenSearchFunctions.OpenSearchFunction) ((LogicalFilter) logicalPlan).getCondition(); assertEquals(true, relevanceQuery.isScoreTracked()); } @Test public void analyze_filter_visit_score_function_with_unsupported_boost_SemanticCheckException() { - UnresolvedPlan unresolvedPlan = AstDSL.filter( - AstDSL.relation("schema"), - new ScoreFunction( - AstDSL.function("match_phrase_prefix", - AstDSL.unresolvedArg("field", stringLiteral("field_value1")), - AstDSL.unresolvedArg("query", stringLiteral("search query")), - AstDSL.unresolvedArg("boost", stringLiteral("3")) - ), AstDSL.stringLiteral("3.0") - ) - ); + UnresolvedPlan unresolvedPlan = + AstDSL.filter( + AstDSL.relation("schema"), + new ScoreFunction( + AstDSL.function( + "match_phrase_prefix", + AstDSL.unresolvedArg("field", stringLiteral("field_value1")), + AstDSL.unresolvedArg("query", stringLiteral("search query")), + AstDSL.unresolvedArg("boost", stringLiteral("3"))), + AstDSL.stringLiteral("3.0"))); SemanticCheckException exception = - assertThrows( - SemanticCheckException.class, - () -> analyze(unresolvedPlan)); - assertEquals( - "Expected boost type 'DOUBLE' but got 'STRING'", - exception.getMessage()); + assertThrows(SemanticCheckException.class, () -> analyze(unresolvedPlan)); + assertEquals("Expected boost type 'DOUBLE' but got 'STRING'", exception.getMessage()); } @Test public void head_relation() { assertAnalyzeEqual( - LogicalPlanDSL.limit(LogicalPlanDSL.relation("schema", table), - 10, 0), + LogicalPlanDSL.limit(LogicalPlanDSL.relation("schema", table), 10, 0), AstDSL.head(AstDSL.relation("schema"), 10, 0)); } @@ -418,7 +398,7 @@ public void analyze_filter_aggregation_relation() { DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER))), DSL.named("MIN(integer_value)", DSL.min(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), - DSL.greater(// Expect to be replaced with reference by expression optimizer + DSL.greater( // Expect to be replaced with reference by expression optimizer DSL.ref("MIN(integer_value)", INTEGER), DSL.literal(integerValue(10)))), AstDSL.filter( AstDSL.agg( @@ -429,8 +409,7 @@ public void analyze_filter_aggregation_relation() { emptyList(), ImmutableList.of(alias("string_value", qualifiedName("string_value"))), emptyList()), - compare(">", - aggregate("MIN", qualifiedName("integer_value")), intLiteral(10)))); + compare(">", aggregate("MIN", qualifiedName("integer_value")), intLiteral(10)))); } @Test @@ -449,19 +428,16 @@ public void stats_source() { assertAnalyzeEqual( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), AstDSL.agg( AstDSL.relation("schema"), AstDSL.exprList( AstDSL.alias( - "avg(integer_value)", - AstDSL.aggregate("avg", 
field("integer_value"))) - ), + "avg(integer_value)", AstDSL.aggregate("avg", field("integer_value")))), null, - ImmutableList.of( - AstDSL.alias("string_value", field("string_value"))), + ImmutableList.of(AstDSL.alias("string_value", field("string_value"))), AstDSL.defaultStatsArgs())); } @@ -473,16 +449,13 @@ public void rare_source() { CommandType.RARE, 10, ImmutableList.of(DSL.ref("string_value", STRING)), - DSL.ref("integer_value", INTEGER) - ), + DSL.ref("integer_value", INTEGER)), AstDSL.rareTopN( AstDSL.relation("schema"), CommandType.RARE, ImmutableList.of(argument("noOfResults", intLiteral(10))), ImmutableList.of(field("string_value")), - field("integer_value") - ) - ); + field("integer_value"))); } @Test @@ -493,16 +466,13 @@ public void top_source() { CommandType.TOP, 5, ImmutableList.of(DSL.ref("string_value", STRING)), - DSL.ref("integer_value", INTEGER) - ), + DSL.ref("integer_value", INTEGER)), AstDSL.rareTopN( AstDSL.relation("schema"), CommandType.TOP, ImmutableList.of(argument("noOfResults", intLiteral(5))), ImmutableList.of(field("string_value")), - field("integer_value") - ) - ); + field("integer_value"))); } @Test @@ -516,8 +486,9 @@ public void rename_to_invalid_expression() { AstDSL.agg( AstDSL.relation("schema"), AstDSL.exprList( - AstDSL.alias("avg(integer_value)", AstDSL.aggregate("avg", field( - "integer_value")))), + AstDSL.alias( + "avg(integer_value)", + AstDSL.aggregate("avg", field("integer_value")))), Collections.emptyList(), ImmutableList.of(), AstDSL.defaultStatsArgs()), @@ -535,8 +506,7 @@ public void project_source() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - DSL.named("double_value", DSL.ref("double_value", DOUBLE)) - ), + DSL.named("double_value", DSL.ref("double_value", DOUBLE))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), @@ -550,34 +520,25 @@ public void project_nested_field_arg() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression( - "nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING)), - null) - ); + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)), null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info)", - function("nested", qualifiedName("message", "info")), null) - ) - ); + AstDSL.alias( + "nested(message.info)", + function("nested", qualifiedName("message", "info")), + null))); assertTrue(isNestedFunction(DSL.nested(DSL.ref("message.info", STRING)))); assertFalse(isNestedFunction(DSL.literal("fieldA"))); @@ -586,64 +547,51 @@ public void project_nested_field_arg() { @Test public void sort_with_nested_all_tuple_fields_throws_exception() { - assertThrows(UnsupportedOperationException.class, () -> analyze( - AstDSL.project( - AstDSL.sort( - AstDSL.relation("schema"), - field(nestedAllTupleFields("message")) - ), - AstDSL.alias("nested(message.*)", - 
nestedAllTupleFields("message")) - ) - )); + assertThrows( + UnsupportedOperationException.class, + () -> + analyze( + AstDSL.project( + AstDSL.sort(AstDSL.relation("schema"), field(nestedAllTupleFields("message"))), + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message"))))); } @Test public void filter_with_nested_all_tuple_fields_throws_exception() { - assertThrows(UnsupportedOperationException.class, () -> analyze( - AstDSL.project( - AstDSL.filter( - AstDSL.relation("schema"), - AstDSL.function("=", nestedAllTupleFields("message"), AstDSL.intLiteral(1))), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")) - ) - )); + assertThrows( + UnsupportedOperationException.class, + () -> + analyze( + AstDSL.project( + AstDSL.filter( + AstDSL.relation("schema"), + AstDSL.function( + "=", nestedAllTupleFields("message"), AstDSL.intLiteral(1))), + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message"))))); } - @Test public void project_nested_field_star_arg() { List> nestedArgs = List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)))); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")))); } @Test @@ -652,42 +600,29 @@ public void project_nested_field_star_arg_with_another_nested_function() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ), + "path", new ReferenceExpression("message", STRING)), Map.of( "field", new ReferenceExpression("comment.data", STRING), - "path", new ReferenceExpression("comment", STRING) - ) - ); + "path", new ReferenceExpression("comment", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - new NamedExpression("nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING))) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + new NamedExpression( + "nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)))); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), 
AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")), - AstDSL.alias("nested(comment.*)", - nestedAllTupleFields("comment")) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")), + AstDSL.alias("nested(comment.*)", nestedAllTupleFields("comment")))); } @Test @@ -696,38 +631,25 @@ public void project_nested_field_star_arg_with_another_field() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - new NamedExpression("comment.data", - DSL.ref("comment.data", STRING)) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + new NamedExpression("comment.data", DSL.ref("comment.data", STRING))); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("comment.data", - DSL.ref("comment.data", STRING)) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("comment.data", DSL.ref("comment.data", STRING))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")), - AstDSL.alias("comment.data", - field("comment.data")) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")), + AstDSL.alias("comment.data", field("comment.data")))); } @Test @@ -736,41 +658,32 @@ public void project_nested_field_star_arg_with_highlight() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("highlight(fieldA)", - new HighlightExpression(DSL.literal("fieldA"))) - ); + new NamedExpression( + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("highlight(fieldA)", new HighlightExpression(DSL.literal("fieldA")))); Map highlightArgs = new HashMap<>(); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.highlight(LogicalPlanDSL.relation("schema", table), - DSL.literal("fieldA"), highlightArgs), + LogicalPlanDSL.highlight( + LogicalPlanDSL.relation("schema", table), DSL.literal("fieldA"), highlightArgs), nestedArgs, projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("highlight(fieldA)", - new HighlightExpression(DSL.literal("fieldA"))) - ), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("highlight(fieldA)", new HighlightExpression(DSL.literal("fieldA")))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.*)", - nestedAllTupleFields("message")), - AstDSL.alias("highlight(fieldA)", - new HighlightFunction(AstDSL.stringLiteral("fieldA"), highlightArgs)) - ) - ); + AstDSL.alias("nested(message.*)", nestedAllTupleFields("message")), + AstDSL.alias( + 
"highlight(fieldA)", + new HighlightFunction(AstDSL.stringLiteral("fieldA"), highlightArgs)))); } @Test @@ -779,40 +692,29 @@ public void project_nested_field_and_path_args() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression( "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), - null) - ); + null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named( + "nested(message.info)", + DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info)", - function( - "nested", - qualifiedName("message", "info"), - qualifiedName("message") - ), - null - ) - ) - ); + AstDSL.alias( + "nested(message.info)", + function("nested", qualifiedName("message", "info"), qualifiedName("message")), + null))); } @Test @@ -821,34 +723,25 @@ public void project_nested_deep_field_arg() { List.of( Map.of( "field", new ReferenceExpression("message.info.id", STRING), - "path", new ReferenceExpression("message.info", STRING) - ) - ); + "path", new ReferenceExpression("message.info", STRING))); List projectList = List.of( new NamedExpression( - "nested(message.info.id)", - DSL.nested(DSL.ref("message.info.id", STRING)), - null) - ); + "nested(message.info.id)", DSL.nested(DSL.ref("message.info.id", STRING)), null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - DSL.named("nested(message.info.id)", - DSL.nested(DSL.ref("message.info.id", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info.id)", DSL.nested(DSL.ref("message.info.id", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info.id)", - function("nested", qualifiedName("message", "info", "id")), null) - ) - ); + AstDSL.alias( + "nested(message.info.id)", + function("nested", qualifiedName("message", "info", "id")), + null))); } @Test @@ -857,114 +750,102 @@ public void project_multiple_nested() { List.of( Map.of( "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ), + "path", new ReferenceExpression("message", STRING)), Map.of( "field", new ReferenceExpression("comment.data", STRING), - "path", new ReferenceExpression("comment", STRING) - ) - ); + "path", new ReferenceExpression("comment", STRING))); List projectList = List.of( new NamedExpression( - "nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING)), - null), + "nested(message.info)", DSL.nested(DSL.ref("message.info", STRING)), null), new NamedExpression( - "nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING)), - null) - ); + "nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)), null)); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.nested( - LogicalPlanDSL.relation("schema", table), - nestedArgs, - projectList), - 
DSL.named("nested(message.info)", - DSL.nested(DSL.ref("message.info", STRING))), - DSL.named("nested(comment.data)", - DSL.nested(DSL.ref("comment.data", STRING))) - ), + LogicalPlanDSL.relation("schema", table), nestedArgs, projectList), + DSL.named("nested(message.info)", DSL.nested(DSL.ref("message.info", STRING))), + DSL.named("nested(comment.data)", DSL.nested(DSL.ref("comment.data", STRING)))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("nested(message.info)", - function("nested", qualifiedName("message", "info")), null), - AstDSL.alias("nested(comment.data)", - function("nested", qualifiedName("comment", "data")), null) - ) - ); + AstDSL.alias( + "nested(message.info)", function("nested", qualifiedName("message", "info")), null), + AstDSL.alias( + "nested(comment.data)", + function("nested", qualifiedName("comment", "data")), + null))); } @Test public void project_nested_invalid_field_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested", qualifiedName("message")), null) - ) - ) - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias( + "message", function("nested", qualifiedName("message")), null)))); assertEquals(exception.getMessage(), "Illegal nested field name: message"); } @Test public void project_nested_invalid_arg_type_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested", stringLiteral("message")), null) - ) - ) - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias( + "message", function("nested", stringLiteral("message")), null)))); assertEquals(exception.getMessage(), "Illegal nested field name: message"); } @Test public void project_nested_no_args_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested"), null) - ) - ) - ); - assertEquals(exception.getMessage(), - "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)" - ); + var exception = + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias("message", function("nested"), null)))); + assertEquals( + exception.getMessage(), + "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)"); } @Test public void project_nested_too_many_args_throws_exception() { - var exception = assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.projectWithArg( - AstDSL.relation("schema"), - AstDSL.defaultFieldsArgs(), - AstDSL.alias("message", - function("nested", - stringLiteral("message.info"), - stringLiteral("message"), - stringLiteral("message")), - null) - ) - ) - ); - assertEquals(exception.getMessage(), - "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)" - ); + var exception = + 
assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.projectWithArg( + AstDSL.relation("schema"), + AstDSL.defaultFieldsArgs(), + AstDSL.alias( + "message", + function( + "nested", + stringLiteral("message.info"), + stringLiteral("message"), + stringLiteral("message")), + null)))); + assertEquals( + exception.getMessage(), + "on nested object only allowed 2 parameters (field,path) or 1 parameter (field)"); } @Test @@ -975,18 +856,17 @@ public void project_highlight() { assertAnalyzeEqual( LogicalPlanDSL.project( - LogicalPlanDSL.highlight(LogicalPlanDSL.relation("schema", table), - DSL.literal("fieldA"), args), - DSL.named("highlight(fieldA, pre_tags='', post_tags='')", - new HighlightExpression(DSL.literal("fieldA"))) - ), + LogicalPlanDSL.highlight( + LogicalPlanDSL.relation("schema", table), DSL.literal("fieldA"), args), + DSL.named( + "highlight(fieldA, pre_tags='', post_tags='')", + new HighlightExpression(DSL.literal("fieldA")))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("highlight(fieldA, pre_tags='', post_tags='')", - new HighlightFunction(AstDSL.stringLiteral("fieldA"), args)) - ) - ); + AstDSL.alias( + "highlight(fieldA, pre_tags='', post_tags='')", + new HighlightFunction(AstDSL.stringLiteral("fieldA"), args)))); } @Test @@ -994,18 +874,13 @@ public void project_highlight_wildcard() { Map args = new HashMap<>(); assertAnalyzeEqual( LogicalPlanDSL.project( - LogicalPlanDSL.highlight(LogicalPlanDSL.relation("schema", table), - DSL.literal("*"), args), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ), + LogicalPlanDSL.highlight( + LogicalPlanDSL.relation("schema", table), DSL.literal("*"), args), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*")))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), - AstDSL.alias("highlight(*)", - new HighlightFunction(AstDSL.stringLiteral("*"), args)) - ) - ); + AstDSL.alias("highlight(*)", new HighlightFunction(AstDSL.stringLiteral("*"), args)))); } @Test @@ -1013,8 +888,8 @@ public void remove_source() { assertAnalyzeEqual( LogicalPlanDSL.remove( LogicalPlanDSL.relation("schema", table), - DSL.ref("integer_value", INTEGER), DSL.ref( - "double_value", DOUBLE)), + DSL.ref("integer_value", INTEGER), + DSL.ref("double_value", DOUBLE)), AstDSL.projectWithArg( AstDSL.relation("schema"), Collections.singletonList(argument("exclude", booleanLiteral(true))), @@ -1022,7 +897,8 @@ public void remove_source() { AstDSL.field("double_value"))); } - @Disabled("the project/remove command should shrink the type env. Should be enabled once " + @Disabled( + "the project/remove command should shrink the type env. 
Should be enabled once " + "https://github.com/opensearch-project/sql/issues/917 is resolved") @Test public void project_source_change_type_env() { @@ -1048,15 +924,12 @@ public void project_values() { LogicalPlanDSL.values(ImmutableList.of(DSL.literal(123))), DSL.named("123", DSL.literal(123)), DSL.named("hello", DSL.literal("hello")), - DSL.named("false", DSL.literal(false)) - ), + DSL.named("false", DSL.literal(false))), AstDSL.project( AstDSL.values(ImmutableList.of(AstDSL.intLiteral(123))), AstDSL.alias("123", AstDSL.intLiteral(123)), AstDSL.alias("hello", AstDSL.stringLiteral("hello")), - AstDSL.alias("false", AstDSL.booleanLiteral(false)) - ) - ); + AstDSL.alias("false", AstDSL.booleanLiteral(false)))); } @SuppressWarnings("unchecked") @@ -1069,8 +942,7 @@ public void sort_with_aggregator() { LogicalPlanDSL.relation("test", table), ImmutableList.of( DSL.named( - "avg(integer_value)", - DSL.avg(DSL.ref("integer_value", INTEGER)))), + "avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), // Aggregator in Sort AST node is replaced with reference by expression optimizer Pair.of(SortOption.DEFAULT_ASC, DSL.ref("avg(integer_value)", DOUBLE))), @@ -1081,12 +953,10 @@ public void sort_with_aggregator() { AstDSL.relation("test"), ImmutableList.of( AstDSL.alias( - "avg(integer_value)", - function("avg", qualifiedName("integer_value")))), + "avg(integer_value)", function("avg", qualifiedName("integer_value")))), emptyList(), ImmutableList.of(AstDSL.alias("string_value", qualifiedName("string_value"))), - emptyList() - ), + emptyList()), field( function("avg", qualifiedName("integer_value")), argument("asc", booleanLiteral(true)))), @@ -1098,40 +968,49 @@ public void sort_with_aggregator() { public void sort_with_options() { ImmutableMap argOptions = ImmutableMap.builder() - .put(new Argument[] {argument("asc", booleanLiteral(true))}, + .put( + new Argument[] {argument("asc", booleanLiteral(true))}, new SortOption(SortOrder.ASC, NullOrder.NULL_FIRST)) - .put(new Argument[] {argument("asc", booleanLiteral(false))}, + .put( + new Argument[] {argument("asc", booleanLiteral(false))}, new SortOption(SortOrder.DESC, NullOrder.NULL_LAST)) - .put(new Argument[] { - argument("asc", booleanLiteral(true)), - argument("nullFirst", booleanLiteral(true))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(true)), argument("nullFirst", booleanLiteral(true)) + }, new SortOption(SortOrder.ASC, NullOrder.NULL_FIRST)) - .put(new Argument[] { - argument("asc", booleanLiteral(true)), - argument("nullFirst", booleanLiteral(false))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(true)), + argument("nullFirst", booleanLiteral(false)) + }, new SortOption(SortOrder.ASC, NullOrder.NULL_LAST)) - .put(new Argument[] { - argument("asc", booleanLiteral(false)), - argument("nullFirst", booleanLiteral(true))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(false)), + argument("nullFirst", booleanLiteral(true)) + }, new SortOption(SortOrder.DESC, NullOrder.NULL_FIRST)) - .put(new Argument[] { - argument("asc", booleanLiteral(false)), - argument("nullFirst", booleanLiteral(false))}, + .put( + new Argument[] { + argument("asc", booleanLiteral(false)), + argument("nullFirst", booleanLiteral(false)) + }, new SortOption(SortOrder.DESC, NullOrder.NULL_LAST)) .build(); - argOptions.forEach((args, expectOption) -> - assertAnalyzeEqual( - LogicalPlanDSL.project( - LogicalPlanDSL.sort( - 
LogicalPlanDSL.relation("test", table), - Pair.of(expectOption, DSL.ref("integer_value", INTEGER))), - DSL.named("string_value", DSL.ref("string_value", STRING))), - AstDSL.project( - AstDSL.sort( - AstDSL.relation("test"), - field(qualifiedName("integer_value"), args)), - AstDSL.alias("string_value", qualifiedName("string_value"))))); + argOptions.forEach( + (args, expectOption) -> + assertAnalyzeEqual( + LogicalPlanDSL.project( + LogicalPlanDSL.sort( + LogicalPlanDSL.relation("test", table), + Pair.of(expectOption, DSL.ref("integer_value", INTEGER))), + DSL.named("string_value", DSL.ref("string_value", STRING))), + AstDSL.project( + AstDSL.sort( + AstDSL.relation("test"), field(qualifiedName("integer_value"), args)), + AstDSL.alias("string_value", qualifiedName("string_value"))))); } @SuppressWarnings("unchecked") @@ -1156,7 +1035,8 @@ public void window_function() { AstDSL.project( AstDSL.relation("test"), AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")), - AstDSL.alias("window_function", + AstDSL.alias( + "window_function", AstDSL.window( AstDSL.function("row_number"), Collections.singletonList(AstDSL.qualifiedName("string_value")), @@ -1164,11 +1044,7 @@ public void window_function() { ImmutablePair.of(DEFAULT_ASC, AstDSL.qualifiedName("integer_value"))))))); } - /** - * SELECT name FROM ( - * SELECT name, age FROM test - * ) AS schema. - */ + /** SELECT name FROM ( SELECT name, age FROM test ) AS schema. */ @Test public void from_subquery() { assertAnalyzeEqual( @@ -1176,29 +1052,19 @@ public void from_subquery() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("string_value", DSL.ref("string_value", STRING)), - DSL.named("integer_value", DSL.ref("integer_value", INTEGER)) - ), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER))), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.project( AstDSL.relationSubquery( AstDSL.project( AstDSL.relation("schema"), AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")), - AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value")) - ), - "schema" - ), - AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")) - ) - ); + AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"))), + "schema"), + AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")))); } - /** - * SELECT * FROM ( - * SELECT name FROM test - * ) AS schema. - */ + /** SELECT * FROM ( SELECT name FROM test ) AS schema. */ @Test public void select_all_from_subquery() { assertAnalyzeEqual( @@ -1206,147 +1072,130 @@ public void select_all_from_subquery() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("string_value", DSL.ref("string_value", STRING))), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.project( AstDSL.relationSubquery( AstDSL.project( AstDSL.relation("schema"), - AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")) - ), - "schema" - ), - AstDSL.allFields() - ) - ); + AstDSL.alias("string_value", AstDSL.qualifiedName("string_value"))), + "schema"), + AstDSL.allFields())); } /** - * Ensure Nested function falls back to legacy engine when used in GROUP BY clause. - * TODO Remove this test when support is added. + * Ensure Nested function falls back to legacy engine when used in GROUP BY clause. TODO Remove + * this test when support is added. 
*/ @Test public void nested_group_by_clause_throws_syntax_exception() { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> analyze( - AstDSL.project( - AstDSL.agg( - AstDSL.relation("schema"), - emptyList(), - emptyList(), - ImmutableList.of(alias("nested(message.info)", - function("nested", - qualifiedName("message", "info")))), - emptyList() - ))) - ); - assertEquals("Falling back to legacy engine. Nested function is not supported in WHERE," + SyntaxCheckException exception = + assertThrows( + SyntaxCheckException.class, + () -> + analyze( + AstDSL.project( + AstDSL.agg( + AstDSL.relation("schema"), + emptyList(), + emptyList(), + ImmutableList.of( + alias( + "nested(message.info)", + function("nested", qualifiedName("message", "info")))), + emptyList())))); + assertEquals( + "Falling back to legacy engine. Nested function is not supported in WHERE," + " GROUP BY, and HAVING clauses.", exception.getMessage()); } - /** - * SELECT name, AVG(age) FROM test GROUP BY name. - */ + /** SELECT name, AVG(age) FROM test GROUP BY name. */ @Test public void sql_group_by_field() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL - .named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), DSL.named("string_value", DSL.ref("string_value", STRING)), DSL.named("AVG(integer_value)", DSL.ref("AVG(integer_value)", DOUBLE))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), - ImmutableList.of(alias("AVG(integer_value)", - aggregate("AVG", qualifiedName("integer_value")))), + ImmutableList.of( + alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))), emptyList(), ImmutableList.of(alias("string_value", qualifiedName("string_value"))), emptyList()), AstDSL.alias("string_value", qualifiedName("string_value")), - AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))) - ); + AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value"))))); } - /** - * SELECT abs(name), AVG(age) FROM test GROUP BY abs(name). - */ + /** SELECT abs(name), AVG(age) FROM test GROUP BY abs(name). 
*/ @Test public void sql_group_by_function() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL - .named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("abs(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("abs(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("abs(long_value)", LONG)), DSL.named("AVG(integer_value)", DSL.ref("AVG(integer_value)", DOUBLE))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), - ImmutableList.of(alias("AVG(integer_value)", - aggregate("AVG", qualifiedName("integer_value")))), + ImmutableList.of( + alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("abs(long_value)", function("abs", qualifiedName("long_value")))), + ImmutableList.of( + alias("abs(long_value)", function("abs", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))) - ); + AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value"))))); } - /** - * SELECT abs(name), AVG(age) FROM test GROUP BY ABS(name). - */ + /** SELECT abs(name), AVG(age) FROM test GROUP BY ABS(name). */ @Test public void sql_group_by_function_in_uppercase() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL - .named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("ABS(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("AVG(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("ABS(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("ABS(long_value)", LONG)), DSL.named("AVG(integer_value)", DSL.ref("AVG(integer_value)", DOUBLE))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), - ImmutableList.of(alias("AVG(integer_value)", - aggregate("AVG", qualifiedName("integer_value")))), + ImmutableList.of( + alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("ABS(long_value)", function("ABS", qualifiedName("long_value")))), + ImmutableList.of( + alias("ABS(long_value)", function("ABS", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value")))) - ); + AstDSL.alias("AVG(integer_value)", aggregate("AVG", qualifiedName("integer_value"))))); } - /** - * SELECT abs(name), abs(avg(age) FROM test GROUP BY abs(name). - */ + /** SELECT abs(name), abs(avg(age) FROM test GROUP BY abs(name). 
*/ @Test public void sql_expression_over_one_aggregation() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("avg(integer_value)", - DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("abs(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("abs(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("abs(long_value)", LONG)), DSL.named("abs(avg(integer_value)", DSL.abs(DSL.ref("avg(integer_value)", DOUBLE)))), AstDSL.project( @@ -1355,34 +1204,32 @@ public void sql_expression_over_one_aggregation() { ImmutableList.of( alias("avg(integer_value)", aggregate("avg", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("abs(long_value)", function("abs", qualifiedName("long_value")))), + ImmutableList.of( + alias("abs(long_value)", function("abs", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("abs(avg(integer_value)", - function("abs", aggregate("avg", qualifiedName("integer_value"))))) - ); + AstDSL.alias( + "abs(avg(integer_value)", + function("abs", aggregate("avg", qualifiedName("integer_value")))))); } - /** - * SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name). - */ + /** SELECT abs(name), sum(age)-avg(age) FROM test GROUP BY abs(name). */ @Test public void sql_expression_over_two_aggregation() { assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("sum(integer_value)", - DSL.sum(DSL.ref("integer_value", INTEGER))), - DSL.named("avg(integer_value)", - DSL.avg(DSL.ref("integer_value", INTEGER)))), - ImmutableList.of(DSL.named("abs(long_value)", - DSL.abs(DSL.ref("long_value", LONG))))), + ImmutableList.of( + DSL.named("sum(integer_value)", DSL.sum(DSL.ref("integer_value", INTEGER))), + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("abs(long_value)", DSL.abs(DSL.ref("long_value", LONG))))), DSL.named("abs(long_value)", DSL.ref("abs(long_value)", LONG)), - DSL.named("sum(integer_value)-avg(integer_value)", - DSL.subtract(DSL.ref("sum(integer_value)", INTEGER), + DSL.named( + "sum(integer_value)-avg(integer_value)", + DSL.subtract( + DSL.ref("sum(integer_value)", INTEGER), DSL.ref("avg(integer_value)", DOUBLE)))), AstDSL.project( AstDSL.agg( @@ -1391,40 +1238,33 @@ public void sql_expression_over_two_aggregation() { alias("sum(integer_value)", aggregate("sum", qualifiedName("integer_value"))), alias("avg(integer_value)", aggregate("avg", qualifiedName("integer_value")))), emptyList(), - ImmutableList - .of(alias("abs(long_value)", function("abs", qualifiedName("long_value")))), + ImmutableList.of( + alias("abs(long_value)", function("abs", qualifiedName("long_value")))), emptyList()), AstDSL.alias("abs(long_value)", function("abs", qualifiedName("long_value"))), - AstDSL.alias("sum(integer_value)-avg(integer_value)", - function("-", aggregate("sum", qualifiedName("integer_value")), - aggregate("avg", qualifiedName("integer_value"))))) - ); + AstDSL.alias( + "sum(integer_value)-avg(integer_value)", + function( + "-", + aggregate("sum", qualifiedName("integer_value")), + aggregate("avg", 
qualifiedName("integer_value")))))); } @Test public void limit_offset() { assertAnalyzeEqual( LogicalPlanDSL.project( - LogicalPlanDSL.limit( - LogicalPlanDSL.relation("schema", table), - 1, 1 - ), - DSL.named("integer_value", DSL.ref("integer_value", INTEGER)) - ), + LogicalPlanDSL.limit(LogicalPlanDSL.relation("schema", table), 1, 1), + DSL.named("integer_value", DSL.ref("integer_value", INTEGER))), AstDSL.project( - AstDSL.limit( - AstDSL.relation("schema"), - 1, 1 - ), - AstDSL.alias("integer_value", qualifiedName("integer_value")) - ) - ); + AstDSL.limit(AstDSL.relation("schema"), 1, 1), + AstDSL.alias("integer_value", qualifiedName("integer_value")))); } /** - * SELECT COUNT(NAME) FILTER(WHERE age > 1) FROM test. - * This test is to verify that the aggregator properties are taken - * when wrapping it to {@link org.opensearch.sql.expression.aggregation.NamedAggregator} + * SELECT COUNT(NAME) FILTER(WHERE age > 1) FROM test. This test is to verify that the aggregator + * properties are taken when wrapping it to {@link + * org.opensearch.sql.expression.aggregation.NamedAggregator} */ @Test public void named_aggregator_with_condition() { @@ -1433,36 +1273,37 @@ public void named_aggregator_with_condition() { LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), ImmutableList.of( - DSL.named("count(string_value) filter(where integer_value > 1)", - DSL.count(DSL.ref("string_value", STRING)).condition(DSL.greater(DSL.ref( - "integer_value", INTEGER), DSL.literal(1)))) - ), - emptyList() - ), - DSL.named("count(string_value) filter(where integer_value > 1)", DSL.ref( - "count(string_value) filter(where integer_value > 1)", INTEGER)) - ), + DSL.named( + "count(string_value) filter(where integer_value > 1)", + DSL.count(DSL.ref("string_value", STRING)) + .condition( + DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))))), + emptyList()), + DSL.named( + "count(string_value) filter(where integer_value > 1)", + DSL.ref("count(string_value) filter(where integer_value > 1)", INTEGER))), AstDSL.project( AstDSL.agg( AstDSL.relation("schema"), ImmutableList.of( - alias("count(string_value) filter(where integer_value > 1)", filteredAggregate( - "count", qualifiedName("string_value"), function( - ">", qualifiedName("integer_value"), intLiteral(1))))), + alias( + "count(string_value) filter(where integer_value > 1)", + filteredAggregate( + "count", + qualifiedName("string_value"), + function(">", qualifiedName("integer_value"), intLiteral(1))))), emptyList(), emptyList(), - emptyList() - ), - AstDSL.alias("count(string_value) filter(where integer_value > 1)", filteredAggregate( - "count", qualifiedName("string_value"), function( - ">", qualifiedName("integer_value"), intLiteral(1)))) - ) - ); + emptyList()), + AstDSL.alias( + "count(string_value) filter(where integer_value > 1)", + filteredAggregate( + "count", + qualifiedName("string_value"), + function(">", qualifiedName("integer_value"), intLiteral(1)))))); } - /** - * stats avg(integer_value) by string_value span(long_value, 10). - */ + /** stats avg(integer_value) by string_value span(long_value, 10). 
*/ @Test public void ppl_stats_by_fieldAndSpan() { assertAnalyzeEqual( @@ -1489,10 +1330,13 @@ public void parse_relation_with_grok_expression() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("grok_field", - DSL.grok(DSL.ref("string_value", STRING), DSL.literal("%{IPV4:grok_field}"), - DSL.literal("grok_field")))) - ), + ImmutableList.of( + DSL.named( + "grok_field", + DSL.grok( + DSL.ref("string_value", STRING), + DSL.literal("%{IPV4:grok_field}"), + DSL.literal("grok_field"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1500,8 +1344,7 @@ public void parse_relation_with_grok_expression() { AstDSL.field("string_value"), AstDSL.stringLiteral("%{IPV4:grok_field}"), ImmutableMap.of()), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test @@ -1510,10 +1353,13 @@ public void parse_relation_with_regex_expression() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("group", - DSL.regex(DSL.ref("string_value", STRING), DSL.literal("(?.*)"), - DSL.literal("group")))) - ), + ImmutableList.of( + DSL.named( + "group", + DSL.regex( + DSL.ref("string_value", STRING), + DSL.literal("(?.*)"), + DSL.literal("group"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1521,25 +1367,28 @@ public void parse_relation_with_regex_expression() { AstDSL.field("string_value"), AstDSL.stringLiteral("(?.*)"), ImmutableMap.of()), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test public void parse_relation_with_patterns_expression() { - Map arguments = ImmutableMap.builder() - .put("new_field", AstDSL.stringLiteral("custom_field")) - .put("pattern", AstDSL.stringLiteral("custom_pattern")) - .build(); + Map arguments = + ImmutableMap.builder() + .put("new_field", AstDSL.stringLiteral("custom_field")) + .put("pattern", AstDSL.stringLiteral("custom_pattern")) + .build(); assertAnalyzeEqual( LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("custom_field", - DSL.patterns(DSL.ref("string_value", STRING), DSL.literal("custom_pattern"), - DSL.literal("custom_field")))) - ), + ImmutableList.of( + DSL.named( + "custom_field", + DSL.patterns( + DSL.ref("string_value", STRING), + DSL.literal("custom_pattern"), + DSL.literal("custom_field"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1547,8 +1396,7 @@ public void parse_relation_with_patterns_expression() { AstDSL.field("string_value"), AstDSL.stringLiteral("custom_pattern"), arguments), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test @@ -1557,10 +1405,13 @@ public void parse_relation_with_patterns_expression_no_args() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING))), - ImmutableList.of(DSL.named("patterns_field", - DSL.patterns(DSL.ref("string_value", STRING), DSL.literal(""), - DSL.literal("patterns_field")))) - ), + ImmutableList.of( + DSL.named( + "patterns_field", + DSL.patterns( + 
DSL.ref("string_value", STRING), + DSL.literal(""), + DSL.literal("patterns_field"))))), AstDSL.project( AstDSL.parse( AstDSL.relation("schema"), @@ -1568,89 +1419,109 @@ public void parse_relation_with_patterns_expression_no_args() { AstDSL.field("string_value"), AstDSL.stringLiteral(""), ImmutableMap.of()), - AstDSL.alias("string_value", qualifiedName("string_value")) - )); + AstDSL.alias("string_value", qualifiedName("string_value")))); } @Test public void kmeanns_relation() { - Map argumentMap = new HashMap() {{ - put("centroids", new Literal(3, DataType.INTEGER)); - put("iterations", new Literal(2, DataType.INTEGER)); - put("distance_type", new Literal("COSINE", DataType.STRING)); - }}; + Map argumentMap = + new HashMap() { + { + put("centroids", new Literal(3, DataType.INTEGER)); + put("iterations", new Literal(2, DataType.INTEGER)); + put("distance_type", new Literal("COSINE", DataType.STRING)); + } + }; assertAnalyzeEqual( - new LogicalMLCommons(LogicalPlanDSL.relation("schema", table), - "kmeans", argumentMap), - new Kmeans(AstDSL.relation("schema"), argumentMap) - ); + new LogicalMLCommons(LogicalPlanDSL.relation("schema", table), "kmeans", argumentMap), + new Kmeans(AstDSL.relation("schema"), argumentMap)); } @Test public void ad_batchRCF_relation() { Map argumentMap = - new HashMap() {{ + new HashMap() { + { put("shingle_size", new Literal(8, DataType.INTEGER)); - }}; + } + }; assertAnalyzeEqual( new LogicalAD(LogicalPlanDSL.relation("schema", table), argumentMap), - new AD(AstDSL.relation("schema"), argumentMap) - ); + new AD(AstDSL.relation("schema"), argumentMap)); } @Test public void ad_fitRCF_relation() { - Map argumentMap = new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal("timestamp", DataType.STRING)); - }}; + Map argumentMap = + new HashMap() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal("timestamp", DataType.STRING)); + } + }; assertAnalyzeEqual( - new LogicalAD(LogicalPlanDSL.relation("schema", table), - argumentMap), - new AD(AstDSL.relation("schema"), argumentMap) - ); + new LogicalAD(LogicalPlanDSL.relation("schema", table), argumentMap), + new AD(AstDSL.relation("schema"), argumentMap)); } @Test public void ad_fitRCF_relation_with_time_field() { - Map argumentMap = new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal("ts", DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal("ts", DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 3); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("score", DSL.ref("score", DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("anomaly_grade", DSL.ref("anomaly_grade", DOUBLE)))); - 
assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("ts", DSL.ref("ts", TIMESTAMP)))); } @Test public void ad_fitRCF_relation_without_time_field() { - Map argumentMap = new HashMap<>() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - }}; + Map argumentMap = + new HashMap<>() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + } + }; - LogicalPlan actual = analyze(AstDSL.project( - new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + LogicalPlan actual = + analyze(AstDSL.project(new AD(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("score", DSL.ref("score", DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("anomalous", DSL.ref("anomalous", BOOLEAN)))); } @Test public void table_function() { - assertAnalyzeEqual(new LogicalRelation("query_range", table), - AstDSL.tableFunction(List.of("prometheus", "query_range"), + assertAnalyzeEqual( + new LogicalRelation("query_range", table), + AstDSL.tableFunction( + List.of("prometheus", "query_range"), unresolvedArg("query", stringLiteral("http_latency")), unresolvedArg("starttime", intLiteral(12345)), unresolvedArg("endtime", intLiteral(12345)), @@ -1659,158 +1530,214 @@ public void table_function() { @Test public void table_function_with_no_datasource() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> analyze(AstDSL.tableFunction(List.of("query_range"), - unresolvedArg("query", stringLiteral("http_latency")), - unresolvedArg("", intLiteral(12345)), - unresolvedArg("", intLiteral(12345)), - unresolvedArg(null, intLiteral(14))))); - assertEquals("unsupported function name: query_range", - exception.getMessage()); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + analyze( + AstDSL.tableFunction( + List.of("query_range"), + unresolvedArg("query", stringLiteral("http_latency")), + unresolvedArg("", intLiteral(12345)), + unresolvedArg("", intLiteral(12345)), + unresolvedArg(null, intLiteral(14))))); + assertEquals("unsupported function name: query_range", exception.getMessage()); } @Test public void table_function_with_wrong_datasource() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> analyze(AstDSL.tableFunction(Arrays.asList("prome", "query_range"), - unresolvedArg("query", stringLiteral("http_latency")), - unresolvedArg("", intLiteral(12345)), - unresolvedArg("", intLiteral(12345)), - unresolvedArg(null, intLiteral(14))))); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + analyze( + AstDSL.tableFunction( + Arrays.asList("prome", "query_range"), + unresolvedArg("query", stringLiteral("http_latency")), + unresolvedArg("", intLiteral(12345)), + unresolvedArg("", intLiteral(12345)), + unresolvedArg(null, intLiteral(14))))); assertEquals("unsupported function name: prome.query_range", exception.getMessage()); } @Test public void table_function_with_wrong_table_function() { 
- ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> analyze(AstDSL.tableFunction(Arrays.asList("prometheus", "queryrange"), - unresolvedArg("query", stringLiteral("http_latency")), - unresolvedArg("", intLiteral(12345)), - unresolvedArg("", intLiteral(12345)), - unresolvedArg(null, intLiteral(14))))); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> + analyze( + AstDSL.tableFunction( + Arrays.asList("prometheus", "queryrange"), + unresolvedArg("query", stringLiteral("http_latency")), + unresolvedArg("", intLiteral(12345)), + unresolvedArg("", intLiteral(12345)), + unresolvedArg(null, intLiteral(14))))); assertEquals("unsupported function name: queryrange", exception.getMessage()); } @Test public void show_datasources() { - assertAnalyzeEqual(new LogicalRelation(DATASOURCES_TABLE_NAME, - new DataSourceTable(dataSourceService)), + assertAnalyzeEqual( + new LogicalRelation(DATASOURCES_TABLE_NAME, new DataSourceTable(dataSourceService)), AstDSL.relation(qualifiedName(DATASOURCES_TABLE_NAME))); } @Test public void ml_relation_unsupported_action() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal("unsupported", DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - }}; + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal("unsupported", DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + } + }; IllegalArgumentException exception = - assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.project( + new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); assertEquals( - "Action error. Please indicate train, predict or trainandpredict.", - exception.getMessage()); + "Action error. 
Please indicate train, predict or trainandpredict.", exception.getMessage()); } @Test public void ml_relation_unsupported_algorithm() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal("unsupported", DataType.STRING)); - }}; + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal("unsupported", DataType.STRING)); + } + }; IllegalArgumentException exception = - assertThrows( - IllegalArgumentException.class, - () -> analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); - assertEquals( - "Unsupported algorithm: unsupported", - exception.getMessage()); + assertThrows( + IllegalArgumentException.class, + () -> + analyze( + AstDSL.project( + new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields()))); + assertEquals("Unsupported algorithm: unsupported", exception.getMessage()); } @Test public void ml_relation_train_sync() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(TRAIN, DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(TRAIN, DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(STATUS, DSL.ref(STATUS, STRING)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(MODELID, DSL.ref(MODELID, STRING)))); } @Test public void ml_relation_train_async() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(TRAIN, DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - put(ASYNC, new Literal(true, DataType.BOOLEAN)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(TRAIN, DataType.STRING)); + put(ALGO, new Literal(KMEANS, DataType.STRING)); + put(ASYNC, new Literal(true, DataType.BOOLEAN)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(STATUS, DSL.ref(STATUS, STRING)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(TASKID, DSL.ref(TASKID, STRING)))); } @Test public void ml_relation_predict_kmeans() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal(KMEANS, DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal(KMEANS, 
DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 1); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(CLUSTERID, DSL.ref(CLUSTERID, INTEGER)))); } @Test public void ml_relation_predict_rcf_with_time_field() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal(RCF, DataType.STRING)); - put(RCF_TIME_FIELD, new Literal("ts", DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal(RCF, DataType.STRING)); + put(RCF_TIME_FIELD, new Literal("ts", DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 3); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_SCORE, DSL.ref(RCF_SCORE, DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_ANOMALY_GRADE, DSL.ref(RCF_ANOMALY_GRADE, DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named("ts", DSL.ref("ts", TIMESTAMP)))); } @Test public void ml_relation_predict_rcf_without_time_field() { - Map argumentMap = new HashMap<>() {{ - put(ACTION, new Literal(PREDICT, DataType.STRING)); - put(ALGO, new Literal(RCF, DataType.STRING)); - }}; - - LogicalPlan actual = analyze(AstDSL.project( - new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); + Map argumentMap = + new HashMap<>() { + { + put(ACTION, new Literal(PREDICT, DataType.STRING)); + put(ALGO, new Literal(RCF, DataType.STRING)); + } + }; + + LogicalPlan actual = + analyze(AstDSL.project(new ML(AstDSL.relation("schema"), argumentMap), AstDSL.allFields())); assertTrue(((LogicalProject) actual).getProjectList().size() >= 2); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_SCORE, DSL.ref(RCF_SCORE, DOUBLE)))); - assertTrue(((LogicalProject) actual).getProjectList() + assertTrue( + ((LogicalProject) actual) + .getProjectList() .contains(DSL.named(RCF_ANOMALOUS, DSL.ref(RCF_ANOMALOUS, BOOLEAN)))); } @@ -1825,8 +1752,10 @@ public void visit_paginate() { void visit_cursor() { LogicalPlan actual = analyze((new FetchCursor("test"))); assertTrue(actual instanceof LogicalFetchCursor); - assertEquals(new LogicalFetchCursor("test", - dataSourceService.getDataSource("@opensearch").getStorageEngine()), actual); + assertEquals( + new LogicalFetchCursor( + "test", dataSourceService.getDataSource("@opensearch").getStorageEngine()), + actual); } @Test @@ -1835,7 +1764,7 @@ public void visit_close_cursor() { assertAll( () -> assertTrue(analyzed instanceof LogicalCloseCursor), () -> assertTrue(analyzed.getChild().get(0) instanceof LogicalFetchCursor), - () -> assertEquals("pewpew", ((LogicalFetchCursor) analyzed.getChild().get(0)).getCursor()) - ); + () -> + 
assertEquals("pewpew", ((LogicalFetchCursor) analyzed.getChild().get(0)).getCursor())); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java index b6e2600041..f09bc5d380 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -47,7 +46,6 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; - public class AnalyzerTestBase { protected Map typeMapping() { @@ -92,31 +90,34 @@ public Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableNam } protected Table table() { - return Optional.ofNullable(table).orElseGet(() -> new Table() { - @Override - public boolean exists() { - return true; - } - - @Override - public void create(Map schema) { - throw new UnsupportedOperationException("Create table is not supported"); - } - - @Override - public Map getFieldTypes() { - return typeMapping(); - } - - @Override - public PhysicalPlan implement(LogicalPlan plan) { - throw new UnsupportedOperationException(); - } - - public Map getReservedFieldTypes() { - return ImmutableMap.of("_test", STRING); - } - }); + return Optional.ofNullable(table) + .orElseGet( + () -> + new Table() { + @Override + public boolean exists() { + return true; + } + + @Override + public void create(Map schema) { + throw new UnsupportedOperationException("Create table is not supported"); + } + + @Override + public Map getFieldTypes() { + return typeMapping(); + } + + @Override + public PhysicalPlan implement(LogicalPlan plan) { + throw new UnsupportedOperationException(); + } + + public Map getReservedFieldTypes() { + return ImmutableMap.of("_test", STRING); + } + }); } protected DataSourceService dataSourceService() { @@ -125,10 +126,12 @@ protected DataSourceService dataSourceService() { protected SymbolTable symbolTable() { SymbolTable symbolTable = new SymbolTable(); - typeMapping().entrySet() + typeMapping() + .entrySet() .forEach( - entry -> symbolTable - .store(new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); + entry -> + symbolTable.store( + new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); return symbolTable; } @@ -154,8 +157,8 @@ protected Environment typeEnv() { protected Analyzer analyzer = analyzer(expressionAnalyzer(), dataSourceService); - protected Analyzer analyzer(ExpressionAnalyzer expressionAnalyzer, - DataSourceService dataSourceService) { + protected Analyzer analyzer( + ExpressionAnalyzer expressionAnalyzer, DataSourceService dataSourceService) { BuiltinFunctionRepository functionRepository = BuiltinFunctionRepository.getInstance(); return new Analyzer(expressionAnalyzer, dataSourceService, functionRepository); } @@ -182,18 +185,22 @@ protected LogicalPlan analyze(UnresolvedPlan unresolvedPlan) { private class DefaultDataSourceService implements DataSourceService { - private final DataSource opensearchDataSource = new DataSource(DEFAULT_DATASOURCE_NAME, - DataSourceType.OPENSEARCH, storageEngine()); - private final DataSource prometheusDataSource - = new DataSource("prometheus", DataSourceType.PROMETHEUS, prometheusStorageEngine()); - + private final DataSource opensearchDataSource = + new DataSource(DEFAULT_DATASOURCE_NAME, DataSourceType.OPENSEARCH, 
storageEngine()); + private final DataSource prometheusDataSource = + new DataSource("prometheus", DataSourceType.PROMETHEUS, prometheusStorageEngine()); @Override public Set getDataSourceMetadata(boolean isDefaultDataSourceRequired) { return Stream.of(opensearchDataSource, prometheusDataSource) - .map(ds -> new DataSourceMetadata(ds.getName(), - ds.getConnectorType(),Collections.emptyList(), - ImmutableMap.of())).collect(Collectors.toSet()); + .map( + ds -> + new DataSourceMetadata( + ds.getName(), + ds.getConnectorType(), + Collections.emptyList(), + ImmutableMap.of())) + .collect(Collectors.toSet()); } @Override @@ -216,18 +223,14 @@ public DataSource getDataSource(String dataSourceName) { } @Override - public void updateDataSource(DataSourceMetadata dataSourceMetadata) { - - } + public void updateDataSource(DataSourceMetadata dataSourceMetadata) {} @Override - public void deleteDataSource(String dataSourceName) { - } + public void deleteDataSource(String dataSourceName) {} @Override public Boolean dataSourceExists(String dataSourceName) { - return dataSourceName.equals(DEFAULT_DATASOURCE_NAME) - || dataSourceName.equals("prometheus"); + return dataSourceName.equals(DEFAULT_DATASOURCE_NAME) || dataSourceName.equals("prometheus"); } } @@ -239,8 +242,8 @@ private class TestTableFunctionImplementation implements TableFunctionImplementa private Table table; - public TestTableFunctionImplementation(FunctionName functionName, List arguments, - Table table) { + public TestTableFunctionImplementation( + FunctionName functionName, List arguments, Table table) { this.functionName = functionName; this.arguments = arguments; this.table = table; diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java index 5a05c79132..b27b8348e2 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static java.util.Collections.emptyList; @@ -57,64 +56,50 @@ class ExpressionAnalyzerTest extends AnalyzerTestBase { public void equal() { assertAnalyzeEqual( DSL.equal(DSL.ref("integer_value", INTEGER), DSL.literal(integerValue(1))), - AstDSL.equalTo(AstDSL.unresolvedAttr("integer_value"), AstDSL.intLiteral(1)) - ); + AstDSL.equalTo(AstDSL.unresolvedAttr("integer_value"), AstDSL.intLiteral(1))); } @Test public void and() { assertAnalyzeEqual( DSL.and(DSL.ref("boolean_value", BOOLEAN), DSL.literal(LITERAL_TRUE)), - AstDSL.and(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true)) - ); + AstDSL.and(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true))); } @Test public void or() { assertAnalyzeEqual( DSL.or(DSL.ref("boolean_value", BOOLEAN), DSL.literal(LITERAL_TRUE)), - AstDSL.or(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true)) - ); + AstDSL.or(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true))); } @Test public void xor() { assertAnalyzeEqual( DSL.xor(DSL.ref("boolean_value", BOOLEAN), DSL.literal(LITERAL_TRUE)), - AstDSL.xor(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true)) - ); + AstDSL.xor(AstDSL.unresolvedAttr("boolean_value"), AstDSL.booleanLiteral(true))); } @Test public void not() { assertAnalyzeEqual( DSL.not(DSL.ref("boolean_value", BOOLEAN)), - AstDSL.not(AstDSL.unresolvedAttr("boolean_value")) - ); + 
AstDSL.not(AstDSL.unresolvedAttr("boolean_value"))); } @Test public void qualified_name() { - assertAnalyzeEqual( - DSL.ref("integer_value", INTEGER), - qualifiedName("integer_value") - ); + assertAnalyzeEqual(DSL.ref("integer_value", INTEGER), qualifiedName("integer_value")); } @Test public void between() { assertAnalyzeEqual( DSL.and( - DSL.gte( - DSL.ref("integer_value", INTEGER), - DSL.literal(20)), - DSL.lte( - DSL.ref("integer_value", INTEGER), - DSL.literal(30))), + DSL.gte(DSL.ref("integer_value", INTEGER), DSL.literal(20)), + DSL.lte(DSL.ref("integer_value", INTEGER), DSL.literal(30))), AstDSL.between( - qualifiedName("integer_value"), - AstDSL.intLiteral(20), - AstDSL.intLiteral(30))); + qualifiedName("integer_value"), AstDSL.intLiteral(20), AstDSL.intLiteral(30))); } @Test @@ -149,36 +134,38 @@ public void case_conditions() { AstDSL.caseWhen( null, AstDSL.when( - AstDSL.function(">", - qualifiedName("integer_value"), - AstDSL.intLiteral(50)), AstDSL.stringLiteral("Fifty")), + AstDSL.function(">", qualifiedName("integer_value"), AstDSL.intLiteral(50)), + AstDSL.stringLiteral("Fifty")), AstDSL.when( - AstDSL.function(">", - qualifiedName("integer_value"), - AstDSL.intLiteral(30)), AstDSL.stringLiteral("Thirty")))); + AstDSL.function(">", qualifiedName("integer_value"), AstDSL.intLiteral(30)), + AstDSL.stringLiteral("Thirty")))); } @Test public void castAnalyzer() { assertAnalyzeEqual( DSL.castInt(DSL.ref("boolean_value", BOOLEAN)), - AstDSL.cast(AstDSL.unresolvedAttr("boolean_value"), AstDSL.stringLiteral("INT")) - ); + AstDSL.cast(AstDSL.unresolvedAttr("boolean_value"), AstDSL.stringLiteral("INT"))); - assertThrows(IllegalStateException.class, () -> analyze(AstDSL.cast(AstDSL.unresolvedAttr( - "boolean_value"), AstDSL.stringLiteral("INTERVAL")))); + assertThrows( + IllegalStateException.class, + () -> + analyze( + AstDSL.cast( + AstDSL.unresolvedAttr("boolean_value"), AstDSL.stringLiteral("INTERVAL")))); } @Test public void case_with_default_result_type_different() { - UnresolvedExpression caseWhen = AstDSL.caseWhen( - qualifiedName("integer_value"), - AstDSL.intLiteral(60), - AstDSL.when(AstDSL.intLiteral(30), AstDSL.stringLiteral("Thirty")), - AstDSL.when(AstDSL.intLiteral(50), AstDSL.stringLiteral("Fifty"))); - - SemanticCheckException exception = assertThrows( - SemanticCheckException.class, () -> analyze(caseWhen)); + UnresolvedExpression caseWhen = + AstDSL.caseWhen( + qualifiedName("integer_value"), + AstDSL.intLiteral(60), + AstDSL.when(AstDSL.intLiteral(30), AstDSL.stringLiteral("Thirty")), + AstDSL.when(AstDSL.intLiteral(50), AstDSL.stringLiteral("Fifty"))); + + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, () -> analyze(caseWhen)); assertEquals( "All result types of CASE clause must be the same, but found [STRING, STRING, INTEGER]", exception.getMessage()); @@ -187,8 +174,7 @@ public void case_with_default_result_type_different() { @Test public void scalar_window_function() { assertAnalyzeEqual( - DSL.rank(), - AstDSL.window(AstDSL.function("rank"), emptyList(), emptyList())); + DSL.rank(), AstDSL.window(AstDSL.function("rank"), emptyList(), emptyList())); } @SuppressWarnings("unchecked") @@ -197,9 +183,7 @@ public void aggregate_window_function() { assertAnalyzeEqual( new AggregateWindowFunction(DSL.avg(DSL.ref("integer_value", INTEGER))), AstDSL.window( - AstDSL.aggregate("avg", qualifiedName("integer_value")), - emptyList(), - emptyList())); + AstDSL.aggregate("avg", qualifiedName("integer_value")), emptyList(), emptyList())); } 
@Test @@ -207,26 +191,24 @@ public void qualified_name_with_qualifier() { analysisContext.push(); analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( - DSL.ref("integer_value", INTEGER), - qualifiedName("index_alias", "integer_value") - ); + DSL.ref("integer_value", INTEGER), qualifiedName("index_alias", "integer_value")); analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "object_field"), STRUCT); - analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "object_field.integer_value"), - INTEGER); + analysisContext + .peek() + .define(new Symbol(Namespace.FIELD_NAME, "object_field.integer_value"), INTEGER); assertAnalyzeEqual( DSL.ref("object_field.integer_value", INTEGER), - qualifiedName("object_field", "integer_value") - ); + qualifiedName("object_field", "integer_value")); SyntaxCheckException exception = - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> analyze(qualifiedName("nested_field", "integer_value"))); assertEquals( "The qualifier [nested_field] of qualified name [nested_field.integer_value] " + "must be an field name, index name or its alias", - exception.getMessage() - ); + exception.getMessage()); analysisContext.pop(); } @@ -234,24 +216,15 @@ public void qualified_name_with_qualifier() { public void qualified_name_with_reserved_symbol() { analysisContext.push(); - analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_reserved"), STRING); - analysisContext.peek().addReservedWord(new Symbol(Namespace.FIELD_NAME, "_priority"), FLOAT); + analysisContext.peek().define(new Symbol(Namespace.HIDDEN_FIELD_NAME, "_reserved"), STRING); + analysisContext.peek().define(new Symbol(Namespace.HIDDEN_FIELD_NAME, "_priority"), FLOAT); analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); - assertAnalyzeEqual( - DSL.ref("_priority", FLOAT), - qualifiedName("_priority") - ); - assertAnalyzeEqual( - DSL.ref("_reserved", STRING), - qualifiedName("index_alias", "_reserved") - ); + assertAnalyzeEqual(DSL.ref("_priority", FLOAT), qualifiedName("_priority")); + assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); - // reserved fields take priority over symbol table + // cannot replace an existing field type analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "_reserved"), LONG); - assertAnalyzeEqual( - DSL.ref("_reserved", STRING), - qualifiedName("index_alias", "_reserved") - ); + assertAnalyzeEqual(DSL.ref("_reserved", STRING), qualifiedName("index_alias", "_reserved")); analysisContext.pop(); } @@ -265,9 +238,7 @@ public void interval() { @Test public void all_fields() { - assertAnalyzeEqual( - DSL.literal("*"), - AllFields.of()); + assertAnalyzeEqual(DSL.literal("*"), AllFields.of()); } @Test @@ -281,25 +252,30 @@ public void case_clause() { AstDSL.caseWhen( AstDSL.nullLiteral(), AstDSL.when( - AstDSL.function("=", - qualifiedName("integer_value"), - AstDSL.intLiteral(30)), + AstDSL.function("=", qualifiedName("integer_value"), AstDSL.intLiteral(30)), AstDSL.stringLiteral("test")))); } @Test public void undefined_var_semantic_check_failed() { - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> analyze( - AstDSL.and(AstDSL.unresolvedAttr("undefined_field"), AstDSL.booleanLiteral(true)))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=undefined_field) in type env", + SemanticCheckException exception = + assertThrows( + 
SemanticCheckException.class, + () -> + analyze( + AstDSL.and( + AstDSL.unresolvedAttr("undefined_field"), AstDSL.booleanLiteral(true)))); + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=undefined_field) in type env", exception.getMessage()); } @Test public void undefined_aggregation_function() { - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> analyze(AstDSL.aggregate("ESTDC_ERROR", field("integer_value")))); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> analyze(AstDSL.aggregate("ESTDC_ERROR", field("integer_value")))); assertEquals("Unsupported aggregation function ESTDC_ERROR", exception.getMessage()); } @@ -308,25 +284,24 @@ public void aggregation_filter() { assertAnalyzeEqual( DSL.avg(DSL.ref("integer_value", INTEGER)) .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), - AstDSL.filteredAggregate("avg", qualifiedName("integer_value"), - function(">", qualifiedName("integer_value"), intLiteral(1))) - ); + AstDSL.filteredAggregate( + "avg", + qualifiedName("integer_value"), + function(">", qualifiedName("integer_value"), intLiteral(1)))); } @Test public void variance_mapto_varPop() { assertAnalyzeEqual( DSL.varPop(DSL.ref("integer_value", INTEGER)), - AstDSL.aggregate("variance", qualifiedName("integer_value")) - ); + AstDSL.aggregate("variance", qualifiedName("integer_value"))); } @Test public void distinct_count() { assertAnalyzeEqual( DSL.distinctCount(DSL.ref("integer_value", INTEGER)), - AstDSL.distinctAggregate("count", qualifiedName("integer_value")) - ); + AstDSL.distinctAggregate("count", qualifiedName("integer_value"))); } @Test @@ -334,48 +309,49 @@ public void filtered_distinct_count() { assertAnalyzeEqual( DSL.distinctCount(DSL.ref("integer_value", INTEGER)) .condition(DSL.greater(DSL.ref("integer_value", INTEGER), DSL.literal(1))), - AstDSL.filteredDistinctCount("count", qualifiedName("integer_value"), function( - ">", qualifiedName("integer_value"), intLiteral(1))) - ); + AstDSL.filteredDistinctCount( + "count", + qualifiedName("integer_value"), + function(">", qualifiedName("integer_value"), intLiteral(1)))); } @Test public void take_aggregation() { assertAnalyzeEqual( DSL.take(DSL.ref("string_value", STRING), DSL.literal(10)), - AstDSL.aggregate("take", qualifiedName("string_value"), intLiteral(10)) - ); + AstDSL.aggregate("take", qualifiedName("string_value"), intLiteral(10))); } @Test public void named_argument() { assertAnalyzeEqual( DSL.namedArgument("arg_name", DSL.literal("query")), - AstDSL.unresolvedArg("arg_name", stringLiteral("query")) - ); + AstDSL.unresolvedArg("arg_name", stringLiteral("query"))); } @Test public void named_parse_expression() { analysisContext.push(); analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "string_field"), STRING); - analysisContext.getNamedParseExpressions() - .add(DSL.named("group", - DSL.regex(ref("string_field", STRING), DSL.literal("(?\\d+)"), - DSL.literal("group")))); + analysisContext + .getNamedParseExpressions() + .add( + DSL.named( + "group", + DSL.regex( + ref("string_field", STRING), + DSL.literal("(?\\d+)"), + DSL.literal("group")))); assertAnalyzeEqual( - DSL.regex(ref("string_field", STRING), DSL.literal("(?\\d+)"), - DSL.literal("group")), - qualifiedName("group") - ); + DSL.regex(ref("string_field", STRING), DSL.literal("(?\\d+)"), DSL.literal("group")), + qualifiedName("group")); } @Test public void named_non_parse_expression() { analysisContext.push(); 
analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "string_field"), STRING); - analysisContext.getNamedParseExpressions() - .add(DSL.named("string_field", DSL.literal("123"))); + analysisContext.getNamedParseExpressions().add(DSL.named("string_field", DSL.literal("123"))); assertAnalyzeEqual(DSL.ref("string_field", STRING), qualifiedName("string_field")); } @@ -385,25 +361,29 @@ void match_bool_prefix_expression() { DSL.match_bool_prefix( DSL.namedArgument("field", DSL.literal("field_value1")), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("match_bool_prefix", + AstDSL.function( + "match_bool_prefix", AstDSL.unresolvedArg("field", stringLiteral("field_value1")), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @Test void match_bool_prefix_wrong_expression() { - assertThrows(SemanticCheckException.class, - () -> analyze(AstDSL.function("match_bool_prefix", - AstDSL.unresolvedArg("field", stringLiteral("fieldA")), - AstDSL.unresolvedArg("query", floatLiteral(1.2f))))); + assertThrows( + SemanticCheckException.class, + () -> + analyze( + AstDSL.function( + "match_bool_prefix", + AstDSL.unresolvedArg("field", stringLiteral("fieldA")), + AstDSL.unresolvedArg("query", floatLiteral(1.2f))))); } @Test void visit_span() { assertAnalyzeEqual( DSL.span(DSL.ref("integer_value", INTEGER), DSL.literal(1), ""), - AstDSL.span(qualifiedName("integer_value"), intLiteral(1), SpanUnit.NONE) - ); + AstDSL.span(qualifiedName("integer_value"), intLiteral(1), SpanUnit.NONE)); } @Test @@ -425,13 +405,16 @@ void visit_in() { void multi_match_expression() { assertAnalyzeEqual( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("multi_match", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "multi_match", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @@ -439,14 +422,17 @@ void multi_match_expression() { void multi_match_expression_with_params() { assertAnalyzeEqual( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query")), DSL.namedArgument("analyzer", DSL.literal("keyword"))), - AstDSL.function("multi_match", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "multi_match", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")), AstDSL.unresolvedArg("analyzer", stringLiteral("keyword")))); } @@ -455,14 +441,20 @@ void multi_match_expression_with_params() { void multi_match_expression_two_fields() { assertAnalyzeEqual( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", 
ExprValueUtils.floatValue(1.F), - "field_value2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field_value1", ExprValueUtils.floatValue(1.F), + "field_value2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("multi_match", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field_value1", 1.F, "field_value2", .3F))), + AstDSL.function( + "multi_match", + AstDSL.unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field_value1", 1.F, "field_value2", .3F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @@ -470,13 +462,16 @@ void multi_match_expression_two_fields() { void simple_query_string_expression() { assertAnalyzeEqual( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("simple_query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "simple_query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @@ -484,14 +479,17 @@ void simple_query_string_expression() { void simple_query_string_expression_with_params() { assertAnalyzeEqual( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("sample query")), DSL.namedArgument("analyzer", DSL.literal("keyword"))), - AstDSL.function("simple_query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "simple_query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("sample query")), AstDSL.unresolvedArg("analyzer", stringLiteral("keyword")))); } @@ -500,37 +498,44 @@ void simple_query_string_expression_with_params() { void simple_query_string_expression_two_fields() { assertAnalyzeEqual( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F), - "field_value2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field_value1", ExprValueUtils.floatValue(1.F), + "field_value2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("query", DSL.literal("sample query"))), - AstDSL.function("simple_query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field_value1", 1.F, "field_value2", .3F))), + AstDSL.function( + "simple_query_string", + AstDSL.unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field_value1", 1.F, "field_value2", .3F))), 
AstDSL.unresolvedArg("query", stringLiteral("sample query")))); } @Test void query_expression() { assertAnalyzeEqual( - DSL.query( - DSL.namedArgument("query", DSL.literal("field:query"))), - AstDSL.function("query", - AstDSL.unresolvedArg("query", stringLiteral("field:query")))); + DSL.query(DSL.namedArgument("query", DSL.literal("field:query"))), + AstDSL.function("query", AstDSL.unresolvedArg("query", stringLiteral("field:query")))); } @Test void query_string_expression() { assertAnalyzeEqual( DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("query_value"))), - AstDSL.function("query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("query_value")))); } @@ -538,14 +543,17 @@ void query_string_expression() { void query_string_expression_with_params() { assertAnalyzeEqual( DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field_value1", ExprValueUtils.floatValue(1.F)))))), DSL.namedArgument("query", DSL.literal("query_value")), DSL.namedArgument("escape", DSL.literal("false"))), - AstDSL.function("query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of( - "field_value1", 1.F))), + AstDSL.function( + "query_string", + AstDSL.unresolvedArg("fields", new RelevanceFieldList(Map.of("field_value1", 1.F))), AstDSL.unresolvedArg("query", stringLiteral("query_value")), AstDSL.unresolvedArg("escape", stringLiteral("false")))); } @@ -554,14 +562,20 @@ void query_string_expression_with_params() { void query_string_expression_two_fields() { assertAnalyzeEqual( DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field_value1", ExprValueUtils.floatValue(1.F), - "field_value2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field_value1", ExprValueUtils.floatValue(1.F), + "field_value2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("query", DSL.literal("query_value"))), - AstDSL.function("query_string", - AstDSL.unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field_value1", 1.F, "field_value2", .3F))), + AstDSL.function( + "query_string", + AstDSL.unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field_value1", 1.F, "field_value2", .3F))), AstDSL.unresolvedArg("query", stringLiteral("query_value")))); } @@ -571,7 +585,8 @@ void wildcard_query_expression() { DSL.wildcard_query( DSL.namedArgument("field", DSL.literal("test")), DSL.namedArgument("query", DSL.literal("query_value*"))), - AstDSL.function("wildcard_query", + AstDSL.function( + "wildcard_query", unresolvedArg("field", stringLiteral("test")), unresolvedArg("query", stringLiteral("query_value*")))); } @@ -585,7 +600,8 @@ void 
wildcard_query_expression_all_params() { DSL.namedArgument("boost", DSL.literal("1.5")), DSL.namedArgument("case_insensitive", DSL.literal("true")), DSL.namedArgument("rewrite", DSL.literal("scoring_boolean"))), - AstDSL.function("wildcard_query", + AstDSL.function( + "wildcard_query", unresolvedArg("field", stringLiteral("test")), unresolvedArg("query", stringLiteral("query_value*")), unresolvedArg("boost", stringLiteral("1.5")), @@ -603,154 +619,144 @@ public void match_phrase_prefix_all_params() { DSL.namedArgument("boost", "1.5"), DSL.namedArgument("analyzer", "standard"), DSL.namedArgument("max_expansions", "4"), - DSL.namedArgument("zero_terms_query", "NONE") - ), - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")), - unresolvedArg("boost", stringLiteral("1.5")), - unresolvedArg("analyzer", stringLiteral("standard")), - unresolvedArg("max_expansions", stringLiteral("4")), - unresolvedArg("zero_terms_query", stringLiteral("NONE")) - ) - ); - } - - @Test void score_function_expression() { - assertAnalyzeEqual( - DSL.score( - DSL.namedArgument("RelevanceQuery", - DSL.match_phrase_prefix( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("slop", "3") - ) - )), - AstDSL.function("score", - unresolvedArg("RelevanceQuery", - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")) - ) - ) - ) - ); - } - - @Test void score_function_with_boost() { - assertAnalyzeEqual( - DSL.score( - DSL.namedArgument("RelevanceQuery", - DSL.match_phrase_prefix( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("boost", "3.0") - )), - DSL.namedArgument("boost", "2") - ), - AstDSL.function("score", - unresolvedArg("RelevanceQuery", - AstDSL.function("match_phrase_prefix", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("boost", stringLiteral("3.0")) - ) - ), - unresolvedArg("boost", stringLiteral("2")) - ) - ); - } - - @Test void score_query_function_expression() { - assertAnalyzeEqual( - DSL.score_query( - DSL.namedArgument("RelevanceQuery", - DSL.wildcard_query( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query") - ) - )), - AstDSL.function("score_query", - unresolvedArg("RelevanceQuery", - AstDSL.function("wildcard_query", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")) - ) - ) - ) - ); - } - - @Test void score_query_function_with_boost() { - assertAnalyzeEqual( - DSL.score_query( - DSL.namedArgument("RelevanceQuery", - DSL.wildcard_query( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query") - ) - ), - DSL.namedArgument("boost", "2.0") - ), - AstDSL.function("score_query", - unresolvedArg("RelevanceQuery", - AstDSL.function("wildcard_query", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")) - ) - ), - unresolvedArg("boost", stringLiteral("2.0")) - ) - ); - } - - @Test void scorequery_function_expression() { - assertAnalyzeEqual( - DSL.scorequery( - DSL.namedArgument("RelevanceQuery", - DSL.simple_query_string( - 
DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("slop", "3") - ) - )), - AstDSL.function("scorequery", - unresolvedArg("RelevanceQuery", - AstDSL.function("simple_query_string", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")) - ) - ) - ) - ); + DSL.namedArgument("zero_terms_query", "NONE")), + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")), + unresolvedArg("boost", stringLiteral("1.5")), + unresolvedArg("analyzer", stringLiteral("standard")), + unresolvedArg("max_expansions", stringLiteral("4")), + unresolvedArg("zero_terms_query", stringLiteral("NONE")))); + } + + @Test + void score_function_expression() { + assertAnalyzeEqual( + DSL.score( + DSL.namedArgument( + "RelevanceQuery", + DSL.match_phrase_prefix( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("slop", "3")))), + AstDSL.function( + "score", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")))))); + } + + @Test + void score_function_with_boost() { + assertAnalyzeEqual( + DSL.score( + DSL.namedArgument( + "RelevanceQuery", + DSL.match_phrase_prefix( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("boost", "3.0"))), + DSL.namedArgument("boost", "2")), + AstDSL.function( + "score", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "match_phrase_prefix", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("boost", stringLiteral("3.0")))), + unresolvedArg("boost", stringLiteral("2")))); + } + + @Test + void score_query_function_expression() { + assertAnalyzeEqual( + DSL.score_query( + DSL.namedArgument( + "RelevanceQuery", + DSL.wildcard_query( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query")))), + AstDSL.function( + "score_query", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "wildcard_query", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")))))); + } + + @Test + void score_query_function_with_boost() { + assertAnalyzeEqual( + DSL.score_query( + DSL.namedArgument( + "RelevanceQuery", + DSL.wildcard_query( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"))), + DSL.namedArgument("boost", "2.0")), + AstDSL.function( + "score_query", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "wildcard_query", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")))), + unresolvedArg("boost", stringLiteral("2.0")))); + } + + @Test + void scorequery_function_expression() { + assertAnalyzeEqual( + DSL.scorequery( + DSL.namedArgument( + "RelevanceQuery", + DSL.simple_query_string( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("slop", "3")))), + AstDSL.function( + "scorequery", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + 
"simple_query_string", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")))))); } @Test void scorequery_function_with_boost() { assertAnalyzeEqual( - DSL.scorequery( - DSL.namedArgument("RelevanceQuery", - DSL.simple_query_string( - DSL.namedArgument("field", "field_value1"), - DSL.namedArgument("query", "search query"), - DSL.namedArgument("slop", "3") - )), - DSL.namedArgument("boost", "2.0") - ), - AstDSL.function("scorequery", - unresolvedArg("RelevanceQuery", - AstDSL.function("simple_query_string", - unresolvedArg("field", stringLiteral("field_value1")), - unresolvedArg("query", stringLiteral("search query")), - unresolvedArg("slop", stringLiteral("3")) - ) - ), - unresolvedArg("boost", stringLiteral("2.0")) - ) - ); + DSL.scorequery( + DSL.namedArgument( + "RelevanceQuery", + DSL.simple_query_string( + DSL.namedArgument("field", "field_value1"), + DSL.namedArgument("query", "search query"), + DSL.namedArgument("slop", "3"))), + DSL.namedArgument("boost", "2.0")), + AstDSL.function( + "scorequery", + unresolvedArg( + "RelevanceQuery", + AstDSL.function( + "simple_query_string", + unresolvedArg("field", stringLiteral("field_value1")), + unresolvedArg("query", stringLiteral("search query")), + unresolvedArg("slop", stringLiteral("3")))), + unresolvedArg("boost", stringLiteral("2.0")))); } @Test @@ -764,8 +770,12 @@ public void function_returns_non_constant_value() { // Even a function returns the same values - they are calculated on each call // `sysdate()` which returns `LocalDateTime.now()` shouldn't be cached and should return always // different values - var values = List.of(analyze(function("sysdate")), analyze(function("sysdate")), - analyze(function("sysdate")), analyze(function("sysdate"))); + var values = + List.of( + analyze(function("sysdate")), + analyze(function("sysdate")), + analyze(function("sysdate")), + analyze(function("sysdate"))); var referenceValue = analyze(function("sysdate")).valueOf(); assertTrue(values.stream().noneMatch(v -> v.valueOf() == referenceValue)); } @@ -773,8 +783,12 @@ public void function_returns_non_constant_value() { @Test public void now_as_a_function_not_cached() { // // We can call `now()` as a function, in that case nothing should be cached - var values = List.of(analyze(function("now")), analyze(function("now")), - analyze(function("now")), analyze(function("now"))); + var values = + List.of( + analyze(function("now")), + analyze(function("now")), + analyze(function("now")), + analyze(function("now"))); var referenceValue = analyze(function("now")).valueOf(); assertTrue(values.stream().noneMatch(v -> v.valueOf() == referenceValue)); } @@ -783,13 +797,12 @@ protected Expression analyze(UnresolvedExpression unresolvedExpression) { return expressionAnalyzer.analyze(unresolvedExpression, analysisContext); } - protected void assertAnalyzeEqual(Expression expected, - UnresolvedExpression unresolvedExpression) { + protected void assertAnalyzeEqual( + Expression expected, UnresolvedExpression unresolvedExpression) { assertEquals(expected, analyze(unresolvedExpression)); } - protected void assertAnalyzeEqual(Expression expected, - UnresolvedPlan unresolvedPlan) { + protected void assertAnalyzeEqual(Expression expected, UnresolvedPlan unresolvedPlan) { assertEquals(expected, analyze(unresolvedPlan)); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java 
b/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java index 89d5f699e3..28bcb8793f 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionReferenceOptimizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static java.util.Collections.emptyList; @@ -27,65 +26,58 @@ class ExpressionReferenceOptimizerTest extends AnalyzerTestBase { void expression_without_aggregation_should_not_be_replaced() { assertEquals( DSL.subtract(DSL.ref("age", INTEGER), DSL.literal(1)), - optimize(DSL.subtract(DSL.ref("age", INTEGER), DSL.literal(1))) - ); + optimize(DSL.subtract(DSL.ref("age", INTEGER), DSL.literal(1)))); } @Test void group_expression_should_be_replaced() { - assertEquals( - DSL.ref("abs(balance)", INTEGER), - optimize(DSL.abs(DSL.ref("balance", INTEGER))) - ); + assertEquals(DSL.ref("abs(balance)", INTEGER), optimize(DSL.abs(DSL.ref("balance", INTEGER)))); } @Test void aggregation_expression_should_be_replaced() { - assertEquals( - DSL.ref("AVG(age)", DOUBLE), - optimize(DSL.avg(DSL.ref("age", INTEGER))) - ); + assertEquals(DSL.ref("AVG(age)", DOUBLE), optimize(DSL.avg(DSL.ref("age", INTEGER)))); } @Test void aggregation_in_expression_should_be_replaced() { assertEquals( DSL.subtract(DSL.ref("AVG(age)", DOUBLE), DSL.literal(1)), - optimize(DSL.subtract(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(1))) - ); + optimize(DSL.subtract(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(1)))); } @Test void case_clause_should_be_replaced() { - Expression caseClause = DSL.cases( - null, - DSL.when( - DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), - DSL.literal("true"))); + Expression caseClause = + DSL.cases( + null, + DSL.when(DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), DSL.literal("true"))); LogicalPlan logicalPlan = LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("test", table), emptyList(), - ImmutableList.of(DSL.named( - "CaseClause(whenClauses=[WhenClause(condition==(age, 30), result=\"true\")]," - + " defaultResult=null)", - caseClause))); + ImmutableList.of( + DSL.named( + "CaseClause(whenClauses=[WhenClause(condition==(age, 30), result=\"true\")]," + + " defaultResult=null)", + caseClause))); assertEquals( DSL.ref( "CaseClause(whenClauses=[WhenClause(condition==(age, 30), result=\"true\")]," - + " defaultResult=null)", STRING), + + " defaultResult=null)", + STRING), optimize(caseClause, logicalPlan)); } @Test void aggregation_in_case_when_clause_should_be_replaced() { - Expression caseClause = DSL.cases( - null, - DSL.when( - DSL.equal(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(30)), - DSL.literal("true"))); + Expression caseClause = + DSL.cases( + null, + DSL.when( + DSL.equal(DSL.avg(DSL.ref("age", INTEGER)), DSL.literal(30)), DSL.literal("true"))); LogicalPlan logicalPlan = LogicalPlanDSL.aggregation( @@ -96,19 +88,16 @@ void aggregation_in_case_when_clause_should_be_replaced() { assertEquals( DSL.cases( null, - DSL.when( - DSL.equal(DSL.ref("AVG(age)", DOUBLE), DSL.literal(30)), - DSL.literal("true"))), + DSL.when(DSL.equal(DSL.ref("AVG(age)", DOUBLE), DSL.literal(30)), DSL.literal("true"))), optimize(caseClause, logicalPlan)); } @Test void aggregation_in_case_else_clause_should_be_replaced() { - Expression caseClause = DSL.cases( - DSL.avg(DSL.ref("age", INTEGER)), - DSL.when( - DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), - DSL.literal("true"))); + Expression 
caseClause = + DSL.cases( + DSL.avg(DSL.ref("age", INTEGER)), + DSL.when(DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), DSL.literal("true"))); LogicalPlan logicalPlan = LogicalPlanDSL.aggregation( @@ -119,9 +108,7 @@ void aggregation_in_case_else_clause_should_be_replaced() { assertEquals( DSL.cases( DSL.ref("AVG(age)", DOUBLE), - DSL.when( - DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), - DSL.literal("true"))), + DSL.when(DSL.equal(DSL.ref("age", INTEGER), DSL.literal(30)), DSL.literal("true"))), optimize(caseClause, logicalPlan)); } @@ -136,12 +123,8 @@ void window_expression_should_be_replaced() { DSL.named(DSL.denseRank()), new WindowDefinition(emptyList(), emptyList())); - assertEquals( - DSL.ref("rank()", INTEGER), - optimize(DSL.rank(), logicalPlan)); - assertEquals( - DSL.ref("dense_rank()", INTEGER), - optimize(DSL.denseRank(), logicalPlan)); + assertEquals(DSL.ref("rank()", INTEGER), optimize(DSL.rank(), logicalPlan)); + assertEquals(DSL.ref("dense_rank()", INTEGER), optimize(DSL.denseRank(), logicalPlan)); } Expression optimize(Expression expression) { @@ -158,11 +141,11 @@ Expression optimize(Expression expression, LogicalPlan logicalPlan) { LogicalPlan logicalPlan() { return LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList - .of(DSL.named("AVG(age)", DSL.avg(DSL.ref("age", INTEGER))), - DSL.named("SUM(age)", DSL.sum(DSL.ref("age", INTEGER)))), - ImmutableList.of(DSL.named("balance", DSL.ref("balance", INTEGER)), - DSL.named("abs(balance)", DSL.abs(DSL.ref("balance", INTEGER)))) - ); + ImmutableList.of( + DSL.named("AVG(age)", DSL.avg(DSL.ref("age", INTEGER))), + DSL.named("SUM(age)", DSL.sum(DSL.ref("age", INTEGER)))), + ImmutableList.of( + DSL.named("balance", DSL.ref("balance", INTEGER)), + DSL.named("abs(balance)", DSL.abs(DSL.ref("balance", INTEGER))))); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java index e9c891905c..68c508b645 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -22,8 +21,7 @@ class NamedExpressionAnalyzerTest extends AnalyzerTestBase { void visit_named_select_item() { Alias alias = AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value")); - NamedExpressionAnalyzer analyzer = - new NamedExpressionAnalyzer(expressionAnalyzer); + NamedExpressionAnalyzer analyzer = new NamedExpressionAnalyzer(expressionAnalyzer); NamedExpression analyze = analyzer.analyze(alias, analysisContext); assertEquals("integer_value", analyze.getNameOrAlias()); @@ -32,11 +30,10 @@ void visit_named_select_item() { @Test void visit_highlight() { Map args = new HashMap<>(); - Alias alias = AstDSL.alias("highlight(fieldA)", - new HighlightFunction( - AstDSL.stringLiteral("fieldA"), args)); - NamedExpressionAnalyzer analyzer = - new NamedExpressionAnalyzer(expressionAnalyzer); + Alias alias = + AstDSL.alias( + "highlight(fieldA)", new HighlightFunction(AstDSL.stringLiteral("fieldA"), args)); + NamedExpressionAnalyzer analyzer = new NamedExpressionAnalyzer(expressionAnalyzer); NamedExpression analyze = analyzer.analyze(alias, analysisContext); assertEquals("highlight(fieldA)", analyze.getNameOrAlias()); diff --git 
a/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java index 5833ef6ae4..3599a86918 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/QualifierAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -34,18 +33,26 @@ void should_return_original_name_if_no_qualifier() { @Test void should_report_error_if_qualifier_is_not_index() { - runInScope(new Symbol(Namespace.FIELD_NAME, "aIndex"), ARRAY, () -> { - SyntaxCheckException error = assertThrows(SyntaxCheckException.class, - () -> qualifierAnalyzer.unqualified("a", "integer_value")); - assertEquals("The qualifier [a] of qualified name [a.integer_value] " - + "must be an field name, index name or its alias", error.getMessage()); - }); + runInScope( + new Symbol(Namespace.FIELD_NAME, "aIndex"), + ARRAY, + () -> { + SyntaxCheckException error = + assertThrows( + SyntaxCheckException.class, + () -> qualifierAnalyzer.unqualified("a", "integer_value")); + assertEquals( + "The qualifier [a] of qualified name [a.integer_value] " + + "must be an field name, index name or its alias", + error.getMessage()); + }); } @Test void should_report_error_if_qualifier_is_not_exist() { - SyntaxCheckException error = assertThrows(SyntaxCheckException.class, - () -> qualifierAnalyzer.unqualified("a", "integer_value")); + SyntaxCheckException error = + assertThrows( + SyntaxCheckException.class, () -> qualifierAnalyzer.unqualified("a", "integer_value")); assertEquals( "The qualifier [a] of qualified name [a.integer_value] must be an field name, index name " + "or its alias", @@ -54,23 +61,26 @@ void should_report_error_if_qualifier_is_not_exist() { @Test void should_return_qualified_name_if_qualifier_is_index() { - runInScope(new Symbol(Namespace.INDEX_NAME, "a"), STRUCT, () -> - assertEquals("integer_value", qualifierAnalyzer.unqualified("a", "integer_value")) - ); + runInScope( + new Symbol(Namespace.INDEX_NAME, "a"), + STRUCT, + () -> assertEquals("integer_value", qualifierAnalyzer.unqualified("a", "integer_value"))); } @Test void should_return_qualified_name_if_qualifier_is_field() { - runInScope(new Symbol(Namespace.FIELD_NAME, "a"), STRUCT, () -> - assertEquals("a.integer_value", qualifierAnalyzer.unqualified("a", "integer_value")) - ); + runInScope( + new Symbol(Namespace.FIELD_NAME, "a"), + STRUCT, + () -> assertEquals("a.integer_value", qualifierAnalyzer.unqualified("a", "integer_value"))); } @Test void should_report_error_if_more_parts_in_qualified_name() { - runInScope(new Symbol(Namespace.INDEX_NAME, "a"), STRUCT, () -> - qualifierAnalyzer.unqualified("a", "integer_value", "invalid") - ); + runInScope( + new Symbol(Namespace.INDEX_NAME, "a"), + STRUCT, + () -> qualifierAnalyzer.unqualified("a", "integer_value", "invalid")); } private void runInScope(Symbol symbol, ExprType type, Runnable test) { @@ -82,5 +92,4 @@ private void runInScope(Symbol symbol, ExprType type, Runnable test) { analysisContext.pop(); } } - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java b/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java index 3bd90f0081..27edc588fa 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/SelectAnalyzeTest.java @@ 
-3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.opensearch.sql.ast.dsl.AstDSL.argument; @@ -44,8 +43,7 @@ public void project_all_from_source() { DSL.named("double_value", DSL.ref("double_value", DOUBLE)), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), DSL.named("double_value", DSL.ref("double_value", DOUBLE)), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.projectWithArg( AstDSL.relation("schema"), AstDSL.defaultFieldsArgs(), @@ -61,11 +59,9 @@ public void select_and_project_all() { LogicalPlanDSL.project( LogicalPlanDSL.relation("schema", table), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - DSL.named("double_value", DSL.ref("double_value", DOUBLE)) - ), + DSL.named("double_value", DSL.ref("double_value", DOUBLE))), DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - DSL.named("double_value", DSL.ref("double_value", DOUBLE)) - ), + DSL.named("double_value", DSL.ref("double_value", DOUBLE))), AstDSL.projectWithArg( AstDSL.projectWithArg( AstDSL.relation("schema"), @@ -73,8 +69,7 @@ public void select_and_project_all() { AstDSL.field("integer_value"), AstDSL.field("double_value")), AstDSL.defaultFieldsArgs(), - AllFields.of() - )); + AllFields.of())); } @Test @@ -84,10 +79,8 @@ public void remove_and_project_all() { LogicalPlanDSL.remove( LogicalPlanDSL.relation("schema", table), DSL.ref("integer_value", INTEGER), - DSL.ref("double_value", DOUBLE) - ), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.ref("double_value", DOUBLE)), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.projectWithArg( AstDSL.projectWithArg( AstDSL.relation("schema"), @@ -95,8 +88,7 @@ public void remove_and_project_all() { AstDSL.field("integer_value"), AstDSL.field("double_value")), AstDSL.defaultFieldsArgs(), - AllFields.of() - )); + AllFields.of())); } @Test @@ -105,20 +97,21 @@ public void stats_and_project_all() { LogicalPlanDSL.project( LogicalPlanDSL.aggregation( LogicalPlanDSL.relation("schema", table), - ImmutableList.of(DSL - .named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), + ImmutableList.of( + DSL.named("avg(integer_value)", DSL.avg(DSL.ref("integer_value", INTEGER)))), ImmutableList.of(DSL.named("string_value", DSL.ref("string_value", STRING)))), DSL.named("avg(integer_value)", DSL.ref("avg(integer_value)", DOUBLE)), - DSL.named("string_value", DSL.ref("string_value", STRING)) - ), + DSL.named("string_value", DSL.ref("string_value", STRING))), AstDSL.projectWithArg( AstDSL.agg( AstDSL.relation("schema"), - AstDSL.exprList(AstDSL.alias("avg(integer_value)", AstDSL.aggregate("avg", - field("integer_value")))), + AstDSL.exprList( + AstDSL.alias( + "avg(integer_value)", AstDSL.aggregate("avg", field("integer_value")))), null, ImmutableList.of(AstDSL.alias("string_value", field("string_value"))), - AstDSL.defaultStatsArgs()), AstDSL.defaultFieldsArgs(), + AstDSL.defaultStatsArgs()), + AstDSL.defaultFieldsArgs(), AllFields.of())); } @@ -131,14 +124,12 @@ public void rename_and_project_all() { ImmutableMap.of(DSL.ref("integer_value", INTEGER), DSL.ref("ivalue", INTEGER))), DSL.named("double_value", DSL.ref("double_value", DOUBLE)), DSL.named("string_value", DSL.ref("string_value", STRING)), - DSL.named("ivalue", DSL.ref("ivalue", INTEGER)) - ), + DSL.named("ivalue", DSL.ref("ivalue", INTEGER))), AstDSL.projectWithArg( AstDSL.rename( 
AstDSL.relation("schema"), AstDSL.map(AstDSL.field("integer_value"), AstDSL.field("ivalue"))), AstDSL.defaultFieldsArgs(), - AllFields.of() - )); + AllFields.of())); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java index b2fe29b509..38d4704bcd 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/SelectExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -28,23 +27,20 @@ @ExtendWith(MockitoExtension.class) public class SelectExpressionAnalyzerTest extends AnalyzerTestBase { - @Mock - private ExpressionReferenceOptimizer optimizer; + @Mock private ExpressionReferenceOptimizer optimizer; @Test public void named_expression() { assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value")) - ); + AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"))); } @Test public void named_expression_with_alias() { assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER), "int"), - AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"), "int") - ); + AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value"), "int")); } @Test @@ -52,9 +48,8 @@ public void field_name_with_qualifier() { analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - AstDSL.alias("integer_alias.integer_value", - AstDSL.qualifiedName("index_alias", "integer_value")) - ); + AstDSL.alias( + "integer_alias.integer_value", AstDSL.qualifiedName("index_alias", "integer_value"))); } @Test @@ -62,9 +57,9 @@ public void field_name_with_qualifier_quoted() { analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( DSL.named("integer_value", DSL.ref("integer_value", INTEGER)), - AstDSL.alias("`integer_alias`.integer_value", // qualifier in SELECT is quoted originally - AstDSL.qualifiedName("index_alias", "integer_value")) - ); + AstDSL.alias( + "`integer_alias`.integer_value", // qualifier in SELECT is quoted originally + AstDSL.qualifiedName("index_alias", "integer_value"))); } @Test @@ -72,21 +67,21 @@ public void field_name_in_expression_with_qualifier() { analysisContext.peek().define(new Symbol(Namespace.INDEX_NAME, "index_alias"), STRUCT); assertAnalyzeEqual( DSL.named("abs(index_alias.integer_value)", DSL.abs(DSL.ref("integer_value", INTEGER))), - AstDSL.alias("abs(index_alias.integer_value)", - AstDSL.function("abs", AstDSL.qualifiedName("index_alias", "integer_value"))) - ); + AstDSL.alias( + "abs(index_alias.integer_value)", + AstDSL.function("abs", AstDSL.qualifiedName("index_alias", "integer_value")))); } protected List analyze(UnresolvedExpression unresolvedExpression) { - doAnswer(invocation -> ((NamedExpression) invocation.getArgument(0)) - .getDelegated()).when(optimizer).optimize(any(), any()); + doAnswer(invocation -> ((NamedExpression) invocation.getArgument(0)).getDelegated()) + .when(optimizer) + .optimize(any(), any()); return new SelectExpressionAnalyzer(expressionAnalyzer) - .analyze(Arrays.asList(unresolvedExpression), - analysisContext, optimizer); + 
.analyze(Arrays.asList(unresolvedExpression), analysisContext, optimizer); } - protected void assertAnalyzeEqual(NamedExpression expected, - UnresolvedExpression unresolvedExpression) { + protected void assertAnalyzeEqual( + NamedExpression expected, UnresolvedExpression unresolvedExpression) { assertEquals(Arrays.asList(expected), analyze(unresolvedExpression)); } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java b/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java index c963e1d30d..91677a901e 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/TypeEnvironmentTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,9 +20,7 @@ public class TypeEnvironmentTest { - /** - * Use context class for push/pop. - */ + /** Use context class for push/pop. */ private AnalysisContext context = new AnalysisContext(); @Test @@ -69,20 +66,24 @@ public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePo assertEquals(INTEGER, environment().resolve(toSymbol(age))); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> environment().resolve(toSymbol(city))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=s.city) in type env", + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=s.city) in type env", exception.getMessage()); - exception = assertThrows(SemanticCheckException.class, - () -> environment().resolve(toSymbol(manager))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=s.manager) in type env", + exception = + assertThrows(SemanticCheckException.class, () -> environment().resolve(toSymbol(manager))); + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=s.manager) in type env", exception.getMessage()); } @Test public void resolveLiteralInEnvFailed() { - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> environment().resolve(new Symbol(Namespace.FIELD_NAME, "1"))); - assertEquals("can't resolve Symbol(namespace=FIELD_NAME, name=1) in type env", - exception.getMessage()); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> environment().resolve(new Symbol(Namespace.FIELD_NAME, "1"))); + assertEquals( + "can't resolve Symbol(namespace=FIELD_NAME, name=1) in type env", exception.getMessage()); } private TypeEnvironment environment() { @@ -92,5 +93,4 @@ private TypeEnvironment environment() { private Symbol toSymbol(ReferenceExpression ref) { return new Symbol(Namespace.FIELD_NAME, ref.getAttr()); } - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java index dd4361ad6a..acb11f0b57 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/WindowExpressionAnalyzerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -75,16 +74,12 @@ void should_not_generate_sort_operator_if_no_partition_by_and_order_by_list() { LogicalPlanDSL.window( LogicalPlanDSL.relation("test", table), DSL.named("row_number", DSL.rowNumber()), - new WindowDefinition( - 
ImmutableList.of(), - ImmutableList.of())), + new WindowDefinition(ImmutableList.of(), ImmutableList.of())), analyzer.analyze( AstDSL.alias( "row_number", AstDSL.window( - AstDSL.function("row_number"), - ImmutableList.of(), - ImmutableList.of())), + AstDSL.function("row_number"), ImmutableList.of(), ImmutableList.of())), analysisContext)); } @@ -93,10 +88,7 @@ void should_return_original_child_if_project_item_not_windowed() { assertEquals( child, analyzer.analyze( - AstDSL.alias( - "string_value", - AstDSL.qualifiedName("string_value")), - analysisContext)); + AstDSL.alias("string_value", AstDSL.qualifiedName("string_value")), analysisContext)); } @Test @@ -114,20 +106,23 @@ void can_analyze_sort_options() { .put(new SortOption(DESC, NULL_LAST), DEFAULT_DESC) .build(); - expects.forEach((option, expect) -> { - Alias ast = AstDSL.alias( - "row_number", - AstDSL.window( - AstDSL.function("row_number"), - Collections.emptyList(), - ImmutableList.of( - ImmutablePair.of(option, AstDSL.qualifiedName("integer_value"))))); + expects.forEach( + (option, expect) -> { + Alias ast = + AstDSL.alias( + "row_number", + AstDSL.window( + AstDSL.function("row_number"), + Collections.emptyList(), + ImmutableList.of( + ImmutablePair.of(option, AstDSL.qualifiedName("integer_value"))))); - LogicalPlan plan = analyzer.analyze(ast, analysisContext); - LogicalSort sort = (LogicalSort) plan.getChild().get(0); - assertEquals(expect, sort.getSortList().get(0).getLeft(), - "Assertion failed on input option: " + option); - }); + LogicalPlan plan = analyzer.analyze(ast, analysisContext); + LogicalSort sort = (LogicalSort) plan.getChild().get(0); + assertEquals( + expect, + sort.getSortList().get(0).getLeft(), + "Assertion failed on input option: " + option); + }); } - } diff --git a/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java b/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java index ac429e89a0..775984a528 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/model/DataSourceSchemaIdentifierNameResolverTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.analysis.model; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; @@ -16,9 +15,7 @@ import static org.opensearch.sql.analysis.model.DataSourceSchemaIdentifierNameResolverTest.Identifier.identifierOf; import java.util.Arrays; -import java.util.Collections; import java.util.List; -import java.util.Set; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -29,14 +26,12 @@ @ExtendWith(MockitoExtension.class) public class DataSourceSchemaIdentifierNameResolverTest { - @Mock - private DataSourceService dataSourceService; + @Mock private DataSourceService dataSourceService; @Test void testFullyQualifiedName() { when(dataSourceService.dataSourceExists("prom")).thenReturn(Boolean.TRUE); - identifierOf( - Arrays.asList("prom", "information_schema", "tables"), dataSourceService) + identifierOf(Arrays.asList("prom", "information_schema", "tables"), dataSourceService) .datasource("prom") .schema("information_schema") .name("tables"); @@ -68,8 +63,8 @@ void defaultDataSourceNameResolve() { static class Identifier { private final DataSourceSchemaIdentifierNameResolver resolver; - protected static Identifier 
identifierOf(List parts, - DataSourceService dataSourceService) { + protected static Identifier identifierOf( + List parts, DataSourceService dataSourceService) { return new Identifier(parts, dataSourceService); } diff --git a/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java b/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java index 90f98e8492..176390560e 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/symbol/SymbolTableTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.analysis.symbol; import static org.hamcrest.MatcherAssert.assertThat; @@ -24,7 +23,6 @@ import org.junit.jupiter.api.Test; import org.opensearch.sql.data.type.ExprType; - public class SymbolTableTest { private SymbolTable symbolTable; @@ -60,13 +58,7 @@ public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { Map typeByName = symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")); - assertThat( - typeByName, - allOf( - aMapWithSize(1), - hasEntry("s.projects.active", BOOLEAN) - ) - ); + assertThat(typeByName, allOf(aMapWithSize(1), hasEntry("s.projects.active", BOOLEAN))); } @Test @@ -76,17 +68,11 @@ public void lookupAllFieldsReturnUnnestedFields() { symbolTable.store(new Symbol(Namespace.FIELD_NAME, "active.manager.name"), STRING); symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), BOOLEAN); - Map typeByName = - symbolTable.lookupAllFields(Namespace.FIELD_NAME); + Map typeByName = symbolTable.lookupAllFields(Namespace.FIELD_NAME); assertThat( typeByName, - allOf( - aMapWithSize(2), - hasEntry("active", BOOLEAN), - hasEntry("s.address", BOOLEAN) - ) - ); + allOf(aMapWithSize(2), hasEntry("active", BOOLEAN), hasEntry("s.address", BOOLEAN))); } @Test @@ -94,8 +80,8 @@ public void failedToResolveSymbolNoNamespaceMatched() { symbolTable.store(new Symbol(Namespace.FUNCTION_NAME, "customFunction"), BOOLEAN); assertFalse(symbolTable.lookup(new Symbol(Namespace.FIELD_NAME, "s.projects")).isPresent()); - assertThat(symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")), - anEmptyMap()); + assertThat( + symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")), anEmptyMap()); } @Test @@ -111,5 +97,4 @@ private void defineSymbolShouldBeAbleToResolve(Symbol symbol, ExprType expectedT assertTrue(actualType.isPresent()); assertEquals(expectedType, actualType.get()); } - } diff --git a/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java b/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java index d9386ab122..741db07eb3 100644 --- a/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java +++ b/core/src/test/java/org/opensearch/sql/ast/expression/CastTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -23,5 +22,4 @@ void cast_to_int_and_integer_should_convert_to_same_function_impl() { new Cast(booleanLiteral(true), stringLiteral("INT")).convertFunctionName(), new Cast(booleanLiteral(true), stringLiteral("INTEGER")).convertFunctionName()); } - } diff --git a/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java b/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java index b0ab66bc0e..c211f36ba7 100644 --- 
a/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java +++ b/core/src/test/java/org/opensearch/sql/ast/expression/QualifiedNameTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ast.expression; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -36,5 +35,4 @@ void should_return_empty_if_only_single_part() { assertFalse(name.first().isPresent()); assertFalse(name.getPrefix().isPresent()); } - } diff --git a/core/src/test/java/org/opensearch/sql/config/TestConfig.java b/core/src/test/java/org/opensearch/sql/config/TestConfig.java index 6179f020c2..92b6aac64f 100644 --- a/core/src/test/java/org/opensearch/sql/config/TestConfig.java +++ b/core/src/test/java/org/opensearch/sql/config/TestConfig.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.config; import com.google.common.collect.ImmutableMap; @@ -23,9 +22,7 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Configuration will be used for UT. - */ +/** Configuration will be used for UT. */ public class TestConfig { public static final String INT_TYPE_NULL_VALUE_FIELD = "int_null_value"; public static final String INT_TYPE_MISSING_VALUE_FIELD = "int_missing_value"; @@ -36,32 +33,33 @@ public class TestConfig { public static final String STRING_TYPE_NULL_VALUE_FIELD = "string_null_value"; public static final String STRING_TYPE_MISSING_VALUE_FIELD = "string_missing_value"; - public static Map typeMapping = new ImmutableMap.Builder() - .put("integer_value", ExprCoreType.INTEGER) - .put(INT_TYPE_NULL_VALUE_FIELD, ExprCoreType.INTEGER) - .put(INT_TYPE_MISSING_VALUE_FIELD, ExprCoreType.INTEGER) - .put("long_value", ExprCoreType.LONG) - .put("float_value", ExprCoreType.FLOAT) - .put("double_value", ExprCoreType.DOUBLE) - .put(DOUBLE_TYPE_NULL_VALUE_FIELD, ExprCoreType.DOUBLE) - .put(DOUBLE_TYPE_MISSING_VALUE_FIELD, ExprCoreType.DOUBLE) - .put("boolean_value", ExprCoreType.BOOLEAN) - .put(BOOL_TYPE_NULL_VALUE_FIELD, ExprCoreType.BOOLEAN) - .put(BOOL_TYPE_MISSING_VALUE_FIELD, ExprCoreType.BOOLEAN) - .put("string_value", ExprCoreType.STRING) - .put(STRING_TYPE_NULL_VALUE_FIELD, ExprCoreType.STRING) - .put(STRING_TYPE_MISSING_VALUE_FIELD, ExprCoreType.STRING) - .put("struct_value", ExprCoreType.STRUCT) - .put("array_value", ExprCoreType.ARRAY) - .put("timestamp_value", ExprCoreType.TIMESTAMP) - .put("field_value1", ExprCoreType.STRING) - .put("field_value2", ExprCoreType.STRING) - .put("message", ExprCoreType.STRING) - .put("message.info", ExprCoreType.STRING) - .put("message.info.id", ExprCoreType.STRING) - .put("comment", ExprCoreType.STRING) - .put("comment.data", ExprCoreType.STRING) - .build(); + public static Map typeMapping = + new ImmutableMap.Builder() + .put("integer_value", ExprCoreType.INTEGER) + .put(INT_TYPE_NULL_VALUE_FIELD, ExprCoreType.INTEGER) + .put(INT_TYPE_MISSING_VALUE_FIELD, ExprCoreType.INTEGER) + .put("long_value", ExprCoreType.LONG) + .put("float_value", ExprCoreType.FLOAT) + .put("double_value", ExprCoreType.DOUBLE) + .put(DOUBLE_TYPE_NULL_VALUE_FIELD, ExprCoreType.DOUBLE) + .put(DOUBLE_TYPE_MISSING_VALUE_FIELD, ExprCoreType.DOUBLE) + .put("boolean_value", ExprCoreType.BOOLEAN) + .put(BOOL_TYPE_NULL_VALUE_FIELD, ExprCoreType.BOOLEAN) + .put(BOOL_TYPE_MISSING_VALUE_FIELD, ExprCoreType.BOOLEAN) + .put("string_value", ExprCoreType.STRING) + .put(STRING_TYPE_NULL_VALUE_FIELD, ExprCoreType.STRING) + .put(STRING_TYPE_MISSING_VALUE_FIELD, ExprCoreType.STRING) + 
.put("struct_value", ExprCoreType.STRUCT) + .put("array_value", ExprCoreType.ARRAY) + .put("timestamp_value", ExprCoreType.TIMESTAMP) + .put("field_value1", ExprCoreType.STRING) + .put("field_value2", ExprCoreType.STRING) + .put("message", ExprCoreType.STRING) + .put("message.info", ExprCoreType.STRING) + .put("message.info.id", ExprCoreType.STRING) + .put("comment", ExprCoreType.STRING) + .put("comment.data", ExprCoreType.STRING) + .build(); protected StorageEngine storageEngine() { return new StorageEngine() { @@ -94,10 +92,12 @@ public PhysicalPlan implement(LogicalPlan plan) { protected SymbolTable symbolTable() { SymbolTable symbolTable = new SymbolTable(); - typeMapping.entrySet() + typeMapping + .entrySet() .forEach( - entry -> symbolTable - .store(new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); + entry -> + symbolTable.store( + new Symbol(Namespace.FIELD_NAME, entry.getKey()), entry.getValue())); return symbolTable; } diff --git a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java index c78545f9ae..01fe4a5e4e 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/DateTimeValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -47,13 +46,14 @@ public void timeValueInterfaceTest() { assertEquals(today, timeValue.dateValue(functionProperties)); assertEquals(today.atTime(1, 1, 1), timeValue.datetimeValue(functionProperties)); - assertEquals(ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), - UTC_ZONE_ID).toInstant(), timeValue.timestampValue(functionProperties)); + assertEquals( + ZonedDateTime.of(LocalTime.parse("01:01:01").atDate(today), UTC_ZONE_ID).toInstant(), + timeValue.timestampValue(functionProperties)); assertEquals("01:01:01", timeValue.value()); assertEquals("TIME '01:01:01'", timeValue.toString()); - exception = assertThrows(ExpressionEvaluationException.class, - () -> integerValue(1).timeValue()); + exception = + assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).timeValue()); assertEquals("invalid to get timeValue from value of type INTEGER", exception.getMessage()); } @@ -62,14 +62,17 @@ public void timestampValueInterfaceTest() { ExprValue timestampValue = new ExprTimestampValue("2020-07-07 01:01:01"); assertEquals(TIMESTAMP, timestampValue.type()); - assertEquals(ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), - UTC_ZONE_ID).toInstant(), timestampValue.timestampValue()); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2020-07-07T01:01:01"), UTC_ZONE_ID).toInstant(), + timestampValue.timestampValue()); assertEquals("2020-07-07 01:01:01", timestampValue.value()); assertEquals("TIMESTAMP '2020-07-07 01:01:01'", timestampValue.toString()); assertEquals(LocalDate.parse("2020-07-07"), timestampValue.dateValue()); assertEquals(LocalTime.parse("01:01:01"), timestampValue.timeValue()); assertEquals(LocalDateTime.parse("2020-07-07T01:01:01"), timestampValue.datetimeValue()); - assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).timestampValue(), + assertThrows( + ExpressionEvaluationException.class, + () -> integerValue(1).timestampValue(), "invalid to get timestampValue from value of type INTEGER"); } @@ -80,12 +83,12 @@ public void dateValueInterfaceTest() { 
assertEquals(LocalDate.parse("2012-07-07"), dateValue.dateValue()); assertEquals(LocalTime.parse("00:00:00"), dateValue.timeValue()); assertEquals(LocalDateTime.parse("2012-07-07T00:00:00"), dateValue.datetimeValue()); - assertEquals(ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), - UTC_ZONE_ID).toInstant(), dateValue.timestampValue()); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2012-07-07T00:00:00"), UTC_ZONE_ID).toInstant(), + dateValue.timestampValue()); ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).dateValue()); - assertEquals("invalid to get dateValue from value of type INTEGER", - exception.getMessage()); + assertEquals("invalid to get dateValue from value of type INTEGER", exception.getMessage()); } @Test @@ -95,10 +98,13 @@ public void datetimeValueInterfaceTest() { assertEquals(LocalDateTime.parse("2020-08-17T19:44:00"), datetimeValue.datetimeValue()); assertEquals(LocalDate.parse("2020-08-17"), datetimeValue.dateValue()); assertEquals(LocalTime.parse("19:44:00"), datetimeValue.timeValue()); - assertEquals(ZonedDateTime.of(LocalDateTime.parse("2020-08-17T19:44:00"), - UTC_ZONE_ID).toInstant(), datetimeValue.timestampValue()); + assertEquals( + ZonedDateTime.of(LocalDateTime.parse("2020-08-17T19:44:00"), UTC_ZONE_ID).toInstant(), + datetimeValue.timestampValue()); assertEquals("DATETIME '2020-08-17 19:44:00'", datetimeValue.toString()); - assertThrows(ExpressionEvaluationException.class, () -> integerValue(1).datetimeValue(), + assertThrows( + ExpressionEvaluationException.class, + () -> integerValue(1).datetimeValue(), "invalid to get datetimeValue from value of type INTEGER"); } @@ -106,37 +112,38 @@ public void datetimeValueInterfaceTest() { public void dateInUnsupportedFormat() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> new ExprDateValue("2020-07-07Z")); - assertEquals("date:2020-07-07Z in unsupported format, please use yyyy-MM-dd", - exception.getMessage()); + assertEquals( + "date:2020-07-07Z in unsupported format, please use 'yyyy-MM-dd'", exception.getMessage()); } @Test public void timeInUnsupportedFormat() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> new ExprTimeValue("01:01:0")); - assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss[.SSSSSSSSS]", + assertEquals( + "time:01:01:0 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void timestampInUnsupportedFormat() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprTimestampValue("2020-07-07T01:01:01Z")); + assertThrows( + SemanticCheckException.class, () -> new ExprTimestampValue("2020-07-07T01:01:01Z")); assertEquals( "timestamp:2020-07-07T01:01:01Z in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void datetimeInUnsupportedFormat() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprDatetimeValue("2020-07-07T01:01:01Z")); + assertThrows( + SemanticCheckException.class, () -> new ExprDatetimeValue("2020-07-07T01:01:01Z")); assertEquals( "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @@ -150,11 +157,12 @@ public void stringDateTimeValue() { 
assertEquals("\"2020-08-17 19:44:00\"", stringValue.toString()); SemanticCheckException exception = - assertThrows(SemanticCheckException.class, + assertThrows( + SemanticCheckException.class, () -> new ExprStringValue("2020-07-07T01:01:01Z").datetimeValue()); assertEquals( "datetime:2020-07-07T01:01:01Z in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @@ -167,10 +175,10 @@ public void stringDateValue() { assertEquals("\"2020-08-17\"", stringValue.toString()); SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprStringValue("2020-07-07Z").dateValue()); - assertEquals("date:2020-07-07Z in unsupported format, please use yyyy-MM-dd", - exception.getMessage()); + assertThrows( + SemanticCheckException.class, () -> new ExprStringValue("2020-07-07Z").dateValue()); + assertEquals( + "date:2020-07-07Z in unsupported format, please use 'yyyy-MM-dd'", exception.getMessage()); } @Test @@ -181,9 +189,10 @@ public void stringTimeValue() { assertEquals("\"19:44:00\"", stringValue.toString()); SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprStringValue("01:01:0").timeValue()); - assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss[.SSSSSSSSS]", + assertThrows( + SemanticCheckException.class, () -> new ExprStringValue("01:01:0").timeValue()); + assertEquals( + "time:01:01:0 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @@ -247,32 +256,33 @@ public void datetimeWithVariableNanoPrecision() { @Test public void timestampOverMaxNanoPrecision() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, + assertThrows( + SemanticCheckException.class, () -> new ExprTimestampValue("2020-07-07 01:01:01.1234567890")); assertEquals( - "timestamp:2020-07-07 01:01:01.1234567890 in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + "timestamp:2020-07-07 01:01:01.1234567890 in unsupported format, please use " + + "'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void datetimeOverMaxNanoPrecision() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, + assertThrows( + SemanticCheckException.class, () -> new ExprDatetimeValue("2020-07-07 01:01:01.1234567890")); assertEquals( "datetime:2020-07-07 01:01:01.1234567890 in unsupported format, " - + "please use yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]", + + "please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } @Test public void timeOverMaxNanoPrecision() { SemanticCheckException exception = - assertThrows(SemanticCheckException.class, - () -> new ExprTimeValue("01:01:01.1234567890")); + assertThrows(SemanticCheckException.class, () -> new ExprTimeValue("01:01:01.1234567890")); assertEquals( - "time:01:01:01.1234567890 in unsupported format, please use HH:mm:ss[.SSSSSSSSS]", + "time:01:01:01.1234567890 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java index 07aac33129..b77112528c 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprBooleanValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -29,8 +28,8 @@ public void equal() { ExprValue v2 = ExprBooleanValue.of(true); assertTrue(v1.equals(v2)); assertTrue(v2.equals(v1)); - assertEquals(0, ((ExprBooleanValue)v1).compare((ExprBooleanValue)v2)); - assertEquals(0, ((ExprBooleanValue)v2).compare((ExprBooleanValue)v1)); + assertEquals(0, ((ExprBooleanValue) v1).compare((ExprBooleanValue) v2)); + assertEquals(0, ((ExprBooleanValue) v2).compare((ExprBooleanValue) v1)); } @Test @@ -44,7 +43,9 @@ public void compare() { @Test public void invalid_get_value() { ExprDateValue value = new ExprDateValue("2020-08-20"); - assertThrows(ExpressionEvaluationException.class, value::booleanValue, + assertThrows( + ExpressionEvaluationException.class, + value::booleanValue, String.format("invalid to get booleanValue from value of type %s", value.type())); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java index e61bdb111d..edd4e0d844 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprCollectionValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -48,8 +47,9 @@ public void compare_collection_with_int_object() { @Test public void comparabilityTest() { ExprValue collectionValue = ExprValueUtils.collectionValue(Arrays.asList(0, 1)); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(collectionValue, collectionValue)); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> compare(collectionValue, collectionValue)); assertEquals("ExprCollectionValue instances are not comparable", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java index ff86ad70a1..36785d383e 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprIntervalValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,14 +40,18 @@ public void compare() { public void invalid_compare() { ExprIntervalValue v1 = new ExprIntervalValue(Period.ofYears(1)); ExprIntervalValue v2 = new ExprIntervalValue(Duration.ofHours(1)); - assertThrows(ExpressionEvaluationException.class, () -> v1.compare(v2), + assertThrows( + ExpressionEvaluationException.class, + () -> v1.compare(v2), String.format("invalid to compare intervals with units %s and %s", v1.unit(), v2.unit())); } @Test public void invalid_get_value() { ExprDateValue value = new ExprDateValue("2020-08-20"); - assertThrows(ExpressionEvaluationException.class, value::intervalValue, + assertThrows( + ExpressionEvaluationException.class, + value::intervalValue, String.format("invalid to get intervalValue from value of type %s", value.type())); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java index 871f312c6f..121ceb6ce2 100644 --- 
a/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprMissingValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -19,7 +18,6 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.exception.ExpressionEvaluationException; - class ExprMissingValueTest { @Test @@ -51,8 +49,9 @@ public void equal() { @Test public void comparabilityTest() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(LITERAL_MISSING, LITERAL_MISSING)); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> compare(LITERAL_MISSING, LITERAL_MISSING)); assertEquals("invalid to call compare operation on missing value", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java index f7087a949d..81bcf8f7b3 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprNullValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -50,8 +49,9 @@ public void equal() { @Test public void comparabilityTest() { - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(LITERAL_NULL, LITERAL_NULL)); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, () -> compare(LITERAL_NULL, LITERAL_NULL)); assertEquals("invalid to call compare operation on null value", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java index 8c3f9dc742..ee245d73b4 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprNumberValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -17,8 +16,9 @@ public class ExprNumberValueTest { @Test public void getShortValueFromIncompatibleExprValue() { ExprBooleanValue booleanValue = ExprBooleanValue.of(true); - ExpressionEvaluationException exception = Assertions - .assertThrows(ExpressionEvaluationException.class, () -> booleanValue.shortValue()); + ExpressionEvaluationException exception = + Assertions.assertThrows( + ExpressionEvaluationException.class, () -> booleanValue.shortValue()); assertEquals("invalid to get shortValue from value of type BOOLEAN", exception.getMessage()); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java index abbb730a0c..2a5e5033f7 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprStringValueTest.java @@ -27,8 +27,8 @@ public void equal() { ExprValue v2 = ExprValueUtils.stringValue("str"); assertTrue(v1.equals(v2)); assertTrue(v2.equals(v1)); - assertEquals(0, 
((ExprStringValue)v1).compare((ExprStringValue)v2)); - assertEquals(0, ((ExprStringValue)v2).compare((ExprStringValue)v1)); + assertEquals(0, ((ExprStringValue) v1).compare((ExprStringValue) v2)); + assertEquals(0, ((ExprStringValue) v2).compare((ExprStringValue) v1)); } @Test @@ -42,7 +42,9 @@ public void compare() { @Test public void invalid_get_value() { ExprDateValue value = new ExprDateValue("2020-08-20"); - assertThrows(ExpressionEvaluationException.class, value::stringValue, + assertThrows( + ExpressionEvaluationException.class, + value::stringValue, String.format("invalid to get intervalValue from value of type %s", value.type())); } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java index f2568e5cb4..567e1e78db 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprTupleValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -52,8 +51,8 @@ public void compare_tuple_with_different_size() { @Test public void comparabilityTest() { ExprValue tupleValue = ExprValueUtils.tupleValue(ImmutableMap.of("integer_value", 2)); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> compare(tupleValue, tupleValue)); + ExpressionEvaluationException exception = + assertThrows(ExpressionEvaluationException.class, () -> compare(tupleValue, tupleValue)); assertEquals("ExprTupleValue instances are not comparable", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java index 3ece5e9a96..b965dff643 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueCompareTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -42,136 +41,148 @@ public void dateValueCompare() { @Test public void datetimeValueCompare() { - assertEquals(0, + assertEquals( + 0, new ExprDatetimeValue("2012-08-07 18:00:00") .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals(1, + assertEquals( + 1, new ExprDatetimeValue("2012-08-07 19:00:00") .compareTo(new ExprDatetimeValue("2012-08-07 18:00:00"))); - assertEquals(-1, + assertEquals( + -1, new ExprDatetimeValue("2012-08-07 18:00:00") .compareTo(new ExprDatetimeValue("2012-08-07 19:00:00"))); } @Test public void timestampValueCompare() { - assertEquals(0, + assertEquals( + 0, new ExprTimestampValue("2012-08-07 18:00:00") .compareTo(new ExprTimestampValue("2012-08-07 18:00:00"))); - assertEquals(1, + assertEquals( + 1, new ExprTimestampValue("2012-08-07 19:00:00") .compareTo(new ExprTimestampValue("2012-08-07 18:00:00"))); - assertEquals(-1, + assertEquals( + -1, new ExprTimestampValue("2012-08-07 18:00:00") .compareTo(new ExprTimestampValue("2012-08-07 19:00:00"))); } private static Stream getEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of(new ExprTimestampValue("1961-04-12 09:07:00"), - new ExprDatetimeValue("1961-04-12 09:07:00")), - Arguments.of(new ExprTimestampValue("1984-11-22 00:00:00"), - new ExprDateValue("1984-11-22")), - 
Arguments.of(new ExprTimestampValue(LocalDate.now() + " 00:00:00"), - new ExprDateValue(LocalDate.now())), - Arguments.of(new ExprDatetimeValue(LocalDate.now() + " 17:42:15"), - new ExprTimeValue("17:42:15")), - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimestampValue("2012-08-07 19:14:38")), - Arguments.of(new ExprDateValue("2012-08-07"), - new ExprDatetimeValue("2012-08-07 00:00:00")), - Arguments.of(new ExprDateValue("2007-01-27"), - new ExprDatetimeValue("2007-01-27 00:00:00")), - Arguments.of(new ExprDateValue(LocalDate.now()), - new ExprTimeValue("00:00:00")), - Arguments.of(new ExprTimestampValue("1984-11-22 00:00:00"), - new ExprDateValue("1984-11-22")), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprDatetimeValue(LocalDate.now() + " 19:14:38")), - Arguments.of(new ExprTimeValue("17:42:15"), - new ExprTimestampValue(LocalDate.now() + " 17:42:15")) - ); + Arguments.of( + new ExprTimestampValue("1961-04-12 09:07:00"), + new ExprDatetimeValue("1961-04-12 09:07:00")), + Arguments.of( + new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), + Arguments.of( + new ExprTimestampValue(LocalDate.now() + " 00:00:00"), + new ExprDateValue(LocalDate.now())), + Arguments.of( + new ExprDatetimeValue(LocalDate.now() + " 17:42:15"), new ExprTimeValue("17:42:15")), + Arguments.of( + new ExprDatetimeValue("2012-08-07 19:14:38"), + new ExprTimestampValue("2012-08-07 19:14:38")), + Arguments.of(new ExprDateValue("2012-08-07"), new ExprDatetimeValue("2012-08-07 00:00:00")), + Arguments.of(new ExprDateValue("2007-01-27"), new ExprDatetimeValue("2007-01-27 00:00:00")), + Arguments.of(new ExprDateValue(LocalDate.now()), new ExprTimeValue("00:00:00")), + Arguments.of( + new ExprTimestampValue("1984-11-22 00:00:00"), new ExprDateValue("1984-11-22")), + Arguments.of( + new ExprTimeValue("19:14:38"), new ExprDatetimeValue(LocalDate.now() + " 19:14:38")), + Arguments.of( + new ExprTimeValue("17:42:15"), new ExprTimestampValue(LocalDate.now() + " 17:42:15"))); } /** - * We can't compare directly ExprValues of different datetime types, we need to use - * `FunctionProperties` object to extract comparable values. + * We can't compare directly ExprValues of different datetime types, we need to use + * `FunctionProperties` object to extract comparable values. 
*/ @ParameterizedTest @MethodSource("getEqualDatetimeValuesOfDifferentTypes") public void compareEqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { - assertEquals(0, extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertEquals(0, extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + assertEquals( + 0, + extractDateTime(left, functionProperties) + .compareTo(extractDateTime(right, functionProperties))); + assertEquals( + 0, + extractDateTime(right, functionProperties) + .compareTo(extractDateTime(left, functionProperties))); } private static Stream getNotEqualDatetimeValuesOfDifferentTypes() { return Stream.of( - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), + Arguments.of( + new ExprDatetimeValue("2012-08-07 19:14:38"), new ExprTimestampValue("1961-04-12 09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprDatetimeValue(LocalDate.now() + " 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprDatetimeValue("2012-08-07 00:00:00"), - new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDatetimeValue("1961-04-12 19:14:38"), - new ExprDateValue("1961-04-12")), - Arguments.of(new ExprDateValue("1984-11-22"), - new ExprDatetimeValue("1961-04-12 19:14:38")), - Arguments.of(new ExprDateValue("1984-11-22"), - new ExprTimestampValue("2020-09-16 17:30:00")), - Arguments.of(new ExprDateValue("1984-11-22"), - new ExprTimeValue("19:14:38")), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprDateValue(LocalDate.now())), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprDatetimeValue("2012-08-07 09:07:00")), - Arguments.of(new ExprTimeValue("19:14:38"), - new ExprTimestampValue("1984-02-03 04:05:07")), - Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), + Arguments.of(new ExprDatetimeValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of( + new ExprDatetimeValue(LocalDate.now() + " 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of(new ExprDatetimeValue("2012-08-07 00:00:00"), new ExprDateValue("1961-04-12")), + Arguments.of(new ExprDatetimeValue("1961-04-12 19:14:38"), new ExprDateValue("1961-04-12")), + Arguments.of(new ExprDateValue("1984-11-22"), new ExprDatetimeValue("1961-04-12 19:14:38")), + Arguments.of( + new ExprDateValue("1984-11-22"), new ExprTimestampValue("2020-09-16 17:30:00")), + Arguments.of(new ExprDateValue("1984-11-22"), new ExprTimeValue("19:14:38")), + Arguments.of(new ExprTimeValue("19:14:38"), new ExprDateValue(LocalDate.now())), + Arguments.of(new ExprTimeValue("19:14:38"), new ExprDatetimeValue("2012-08-07 09:07:00")), + Arguments.of(new ExprTimeValue("19:14:38"), new ExprTimestampValue("1984-02-03 04:05:07")), + Arguments.of( + new ExprTimestampValue("2012-08-07 19:14:38"), new ExprDatetimeValue("1961-04-12 09:07:00")), - Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprTimestampValue(LocalDate.now() + " 19:14:38"), - new ExprTimeValue("09:07:00")), - Arguments.of(new ExprTimestampValue("2012-08-07 00:00:00"), - new ExprDateValue("1961-04-12")), - Arguments.of(new ExprTimestampValue("1961-04-12 19:14:38"), - new ExprDateValue("1961-04-12")) - ); + Arguments.of(new ExprTimestampValue("2012-08-07 19:14:38"), new ExprTimeValue("09:07:00")), + Arguments.of( + new ExprTimestampValue(LocalDate.now() + " 19:14:38"), new 
ExprTimeValue("09:07:00")), + Arguments.of( + new ExprTimestampValue("2012-08-07 00:00:00"), new ExprDateValue("1961-04-12")), + Arguments.of( + new ExprTimestampValue("1961-04-12 19:14:38"), new ExprDateValue("1961-04-12"))); } /** - * We can't compare directly ExprValues of different datetime types, we need to use - * `FunctionProperties` object to extract comparable values. + * We can't compare directly ExprValues of different datetime types, we need to use + * `FunctionProperties` object to extract comparable values. */ @ParameterizedTest @MethodSource("getNotEqualDatetimeValuesOfDifferentTypes") public void compareNeqDifferentDateTimeValueTypes(ExprValue left, ExprValue right) { - assertNotEquals(0, extractDateTime(left, functionProperties) - .compareTo(extractDateTime(right, functionProperties))); - assertNotEquals(0, extractDateTime(right, functionProperties) - .compareTo(extractDateTime(left, functionProperties))); + assertNotEquals( + 0, + extractDateTime(left, functionProperties) + .compareTo(extractDateTime(right, functionProperties))); + assertNotEquals( + 0, + extractDateTime(right, functionProperties) + .compareTo(extractDateTime(left, functionProperties))); } @Test public void compareDateTimeWithNotADateTime() { - var exception = assertThrows(ExpressionEvaluationException.class, () -> - new ExprDoubleValue(3.1415).compareTo(new ExprIntervalValue(Period.ofDays(1)))); - assertEquals("compare expected value have same type, but with [DOUBLE, INTERVAL]", + var exception = + assertThrows( + ExpressionEvaluationException.class, + () -> new ExprDoubleValue(3.1415).compareTo(new ExprIntervalValue(Period.ofDays(1)))); + assertEquals( + "compare expected value have same type, but with [DOUBLE, INTERVAL]", exception.getMessage()); - exception = assertThrows(ExpressionEvaluationException.class, () -> - new ExprDateValue("1961-04-12").compareTo(new ExprIntegerValue(1))); - assertEquals("compare expected value have same type, but with [DATE, INTEGER]", - exception.getMessage()); + exception = + assertThrows( + ExpressionEvaluationException.class, + () -> new ExprDateValue("1961-04-12").compareTo(new ExprIntegerValue(1))); + assertEquals( + "compare expected value have same type, but with [DATE, INTEGER]", exception.getMessage()); - exception = assertThrows(ExpressionEvaluationException.class, () -> - new ExprStringValue("something").compareTo(new ExprTimeValue("10:20:30"))); - assertEquals("compare expected value have same type, but with [STRING, TIME]", - exception.getMessage()); + exception = + assertThrows( + ExpressionEvaluationException.class, + () -> new ExprStringValue("something").compareTo(new ExprTimeValue("10:20:30"))); + assertEquals( + "compare expected value have same type, but with [STRING, TIME]", exception.getMessage()); } @Test @@ -194,8 +205,7 @@ private static Stream getEqualNumericValuesOfDifferentTypes() { Arguments.of(new ExprIntegerValue(42), new ExprShortValue(42)), Arguments.of(new ExprIntegerValue(42), new ExprLongValue(42)), Arguments.of(new ExprIntegerValue(42), new ExprFloatValue(42)), - Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(42)) - ); + Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(42))); } @ParameterizedTest @@ -211,8 +221,7 @@ private static Stream getNotEqualNumericValuesOfDifferentTypes() { Arguments.of(new ExprIntegerValue(42), new ExprShortValue(146)), Arguments.of(new ExprIntegerValue(42), new ExprLongValue(100500)), Arguments.of(new ExprIntegerValue(42), new ExprFloatValue(-1.5)), - Arguments.of(new 
ExprIntegerValue(42), new ExprDoubleValue(1468.84138)) - ); + Arguments.of(new ExprIntegerValue(42), new ExprDoubleValue(1468.84138))); } @ParameterizedTest @@ -231,47 +240,50 @@ public void stringValueCompare() { @Test public void intervalValueCompare() { - assertEquals(0, new ExprIntervalValue(Period.ofDays(1)) - .compareTo(new ExprIntervalValue(Period.ofDays(1)))); - assertEquals(1, new ExprIntervalValue(Period.ofDays(2)) - .compareTo(new ExprIntervalValue(Period.ofDays(1)))); - assertEquals(-1, new ExprIntervalValue(Period.ofDays(1)) - .compareTo(new ExprIntervalValue(Period.ofDays(2)))); + assertEquals( + 0, + new ExprIntervalValue(Period.ofDays(1)).compareTo(new ExprIntervalValue(Period.ofDays(1)))); + assertEquals( + 1, + new ExprIntervalValue(Period.ofDays(2)).compareTo(new ExprIntervalValue(Period.ofDays(1)))); + assertEquals( + -1, + new ExprIntervalValue(Period.ofDays(1)).compareTo(new ExprIntervalValue(Period.ofDays(2)))); } @Test public void missingCompareToMethodShouldNotBeenCalledDirectly() { - IllegalStateException exception = assertThrows(IllegalStateException.class, - () -> LITERAL_MISSING.compareTo(LITERAL_FALSE)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> LITERAL_MISSING.compareTo(LITERAL_FALSE)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> LITERAL_FALSE.compareTo(LITERAL_MISSING)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + exception = + assertThrows(IllegalStateException.class, () -> LITERAL_FALSE.compareTo(LITERAL_MISSING)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> ExprMissingValue.of().compare(LITERAL_MISSING)); - assertEquals("[BUG] Unreachable, Comparing with MISSING is undefined", - exception.getMessage()); + exception = + assertThrows( + IllegalStateException.class, () -> ExprMissingValue.of().compare(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with MISSING is undefined", exception.getMessage()); } @Test public void nullCompareToMethodShouldNotBeenCalledDirectly() { - IllegalStateException exception = assertThrows(IllegalStateException.class, - () -> LITERAL_NULL.compareTo(LITERAL_FALSE)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> LITERAL_NULL.compareTo(LITERAL_FALSE)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> LITERAL_FALSE.compareTo(LITERAL_NULL)); - assertEquals("[BUG] Unreachable, Comparing with NULL or MISSING is undefined", - exception.getMessage()); + exception = + assertThrows(IllegalStateException.class, () -> LITERAL_FALSE.compareTo(LITERAL_NULL)); + assertEquals( + "[BUG] Unreachable, Comparing with NULL or MISSING is undefined", exception.getMessage()); - exception = assertThrows(IllegalStateException.class, - () -> ExprNullValue.of().compare(LITERAL_MISSING)); - assertEquals("[BUG] Unreachable, Comparing with NULL is undefined", - exception.getMessage()); + exception = + 
assertThrows( + IllegalStateException.class, () -> ExprNullValue.of().compare(LITERAL_MISSING)); + assertEquals("[BUG] Unreachable, Comparing with NULL is undefined", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java index c4a00fe5ca..c879384955 100644 --- a/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java +++ b/core/src/test/java/org/opensearch/sql/data/model/ExprValueUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.model; import static org.hamcrest.MatcherAssert.assertThat; @@ -58,71 +57,87 @@ public class ExprValueUtilsTest { testTuple.put("1", new ExprIntegerValue(1)); } - private static List numberValues = Stream.of((byte) 1, (short) 1, 1, 1L, 1f, 1D) - .map(ExprValueUtils::fromObjectValue).collect(Collectors.toList()); + private static List numberValues = + Stream.of((byte) 1, (short) 1, 1, 1L, 1f, 1D) + .map(ExprValueUtils::fromObjectValue) + .collect(Collectors.toList()); - private static List nonNumberValues = Arrays.asList( - new ExprStringValue("1"), - ExprBooleanValue.of(true), - new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))), - new ExprTupleValue(testTuple), - new ExprDateValue("2012-08-07"), - new ExprTimeValue("18:00:00"), - new ExprDatetimeValue("2012-08-07 18:00:00"), - new ExprTimestampValue("2012-08-07 18:00:00"), - new ExprIntervalValue(Duration.ofSeconds(100))); + private static List nonNumberValues = + Arrays.asList( + new ExprStringValue("1"), + ExprBooleanValue.of(true), + new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))), + new ExprTupleValue(testTuple), + new ExprDateValue("2012-08-07"), + new ExprTimeValue("18:00:00"), + new ExprDatetimeValue("2012-08-07 18:00:00"), + new ExprTimestampValue("2012-08-07 18:00:00"), + new ExprIntervalValue(Duration.ofSeconds(100))); private static List allValues = Lists.newArrayList(Iterables.concat(numberValues, nonNumberValues)); - private static List> numberValueExtractor = Arrays.asList( - ExprValueUtils::getByteValue, - ExprValueUtils::getShortValue, - ExprValueUtils::getIntegerValue, - ExprValueUtils::getLongValue, - ExprValueUtils::getFloatValue, - ExprValueUtils::getDoubleValue); - private static List> nonNumberValueExtractor = Arrays.asList( - ExprValueUtils::getStringValue, - ExprValueUtils::getBooleanValue, - ExprValueUtils::getCollectionValue, - ExprValueUtils::getTupleValue - ); - private static List> dateAndTimeValueExtractor = Arrays.asList( - ExprValue::dateValue, - ExprValue::timeValue, - ExprValue::datetimeValue, - ExprValue::timestampValue, - ExprValue::intervalValue); - private static List> allValueExtractor = Lists.newArrayList( - Iterables.concat(numberValueExtractor, nonNumberValueExtractor, dateAndTimeValueExtractor)); + private static List> numberValueExtractor = + Arrays.asList( + ExprValueUtils::getByteValue, + ExprValueUtils::getShortValue, + ExprValueUtils::getIntegerValue, + ExprValueUtils::getLongValue, + ExprValueUtils::getFloatValue, + ExprValueUtils::getDoubleValue); + private static List> nonNumberValueExtractor = + Arrays.asList( + ExprValueUtils::getStringValue, + ExprValueUtils::getBooleanValue, + ExprValueUtils::getCollectionValue, + ExprValueUtils::getTupleValue); + private static List> dateAndTimeValueExtractor = + Arrays.asList( + ExprValue::dateValue, + ExprValue::timeValue, + ExprValue::datetimeValue, + ExprValue::timestampValue, + 
ExprValue::intervalValue); + private static List> allValueExtractor = + Lists.newArrayList( + Iterables.concat( + numberValueExtractor, nonNumberValueExtractor, dateAndTimeValueExtractor)); private static List numberTypes = - Arrays.asList(ExprCoreType.BYTE, ExprCoreType.SHORT, ExprCoreType.INTEGER, ExprCoreType.LONG, - ExprCoreType.FLOAT, ExprCoreType.DOUBLE); - private static List nonNumberTypes = - Arrays.asList(STRING, BOOLEAN, ARRAY, STRUCT); + Arrays.asList( + ExprCoreType.BYTE, + ExprCoreType.SHORT, + ExprCoreType.INTEGER, + ExprCoreType.LONG, + ExprCoreType.FLOAT, + ExprCoreType.DOUBLE); + private static List nonNumberTypes = Arrays.asList(STRING, BOOLEAN, ARRAY, STRUCT); private static List dateAndTimeTypes = Arrays.asList(DATE, TIME, DATETIME, TIMESTAMP, INTERVAL); private static List allTypes = Lists.newArrayList(Iterables.concat(numberTypes, nonNumberTypes, dateAndTimeTypes)); private static Stream getValueTestArgumentStream() { - List expectedValues = Arrays.asList((byte) 1, (short) 1, 1, 1L, 1f, 1D, "1", true, - Arrays.asList(integerValue(1)), - ImmutableMap.of("1", integerValue(1)), - LocalDate.parse("2012-08-07"), - LocalTime.parse("18:00:00"), - LocalDateTime.parse("2012-08-07T18:00:00"), - ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), UTC_ZONE_ID).toInstant(), - Duration.ofSeconds(100) - ); + List expectedValues = + Arrays.asList( + (byte) 1, + (short) 1, + 1, + 1L, + 1f, + 1D, + "1", + true, + Arrays.asList(integerValue(1)), + ImmutableMap.of("1", integerValue(1)), + LocalDate.parse("2012-08-07"), + LocalTime.parse("18:00:00"), + LocalDateTime.parse("2012-08-07T18:00:00"), + ZonedDateTime.of(LocalDateTime.parse("2012-08-07T18:00:00"), UTC_ZONE_ID).toInstant(), + Duration.ofSeconds(100)); Stream.Builder builder = Stream.builder(); for (int i = 0; i < expectedValues.size(); i++) { - builder.add(Arguments.of( - allValues.get(i), - allValueExtractor.get(i), - expectedValues.get(i))); + builder.add(Arguments.of(allValues.get(i), allValueExtractor.get(i), expectedValues.get(i))); } return builder.build(); } @@ -130,16 +145,13 @@ private static Stream getValueTestArgumentStream() { private static Stream getTypeTestArgumentStream() { Stream.Builder builder = Stream.builder(); for (int i = 0; i < allValues.size(); i++) { - builder.add(Arguments.of( - allValues.get(i), - allTypes.get(i))); + builder.add(Arguments.of(allValues.get(i), allTypes.get(i))); } return builder.build(); } private static Stream invalidGetNumberValueArgumentStream() { - return Lists.cartesianProduct(nonNumberValues, numberValueExtractor) - .stream() + return Lists.cartesianProduct(nonNumberValues, numberValueExtractor).stream() .map(list -> Arguments.of(list.get(0), list.get(1))); } @@ -151,23 +163,20 @@ private static Stream invalidConvert() { extractorWithTypeList.add( new AbstractMap.SimpleEntry<>(nonNumberValueExtractor.get(i), nonNumberTypes.get(i))); } - return Lists.cartesianProduct(allValues, extractorWithTypeList) - .stream() - .filter(list -> { - ExprValue value = (ExprValue) list.get(0); - Map.Entry, ExprCoreType> entry = - (Map.Entry, - ExprCoreType>) list - .get(1); - return entry.getValue() != value.type(); - }) - .map(list -> { - Map.Entry, ExprCoreType> entry = - (Map.Entry, - ExprCoreType>) list - .get(1); - return Arguments.of(list.get(0), entry.getKey(), entry.getValue()); - }); + return Lists.cartesianProduct(allValues, extractorWithTypeList).stream() + .filter( + list -> { + ExprValue value = (ExprValue) list.get(0); + Map.Entry, ExprCoreType> entry = + (Map.Entry, 
ExprCoreType>) list.get(1); + return entry.getValue() != value.type(); + }) + .map( + list -> { + Map.Entry, ExprCoreType> entry = + (Map.Entry, ExprCoreType>) list.get(1); + return Arguments.of(list.get(0), entry.getKey(), entry.getValue()); + }); } @ParameterizedTest(name = "the value of ExprValue:{0} is: {2} ") @@ -182,36 +191,33 @@ public void getType(ExprValue value, ExprCoreType expectType) { assertEquals(expectType, value.type()); } - /** - * Test Invalid to get number. - */ + /** Test Invalid to get number. */ @ParameterizedTest(name = "invalid to get number value of ExprValue:{0}") @MethodSource("invalidGetNumberValueArgumentStream") public void invalidGetNumberValue(ExprValue value, Function extractor) { - Exception exception = assertThrows(ExpressionEvaluationException.class, - () -> extractor.apply(value)); + Exception exception = + assertThrows(ExpressionEvaluationException.class, () -> extractor.apply(value)); assertThat(exception.getMessage(), Matchers.containsString("invalid")); } - /** - * Test Invalid to convert. - */ + /** Test Invalid to convert. */ @ParameterizedTest(name = "invalid convert ExprValue:{0} to ExprType:{2}") @MethodSource("invalidConvert") - public void invalidConvertExprValue(ExprValue value, Function extractor, - ExprCoreType toType) { - Exception exception = assertThrows(ExpressionEvaluationException.class, - () -> extractor.apply(value)); + public void invalidConvertExprValue( + ExprValue value, Function extractor, ExprCoreType toType) { + Exception exception = + assertThrows(ExpressionEvaluationException.class, () -> extractor.apply(value)); assertThat(exception.getMessage(), Matchers.containsString("invalid")); } @Test public void unSupportedObject() { - Exception exception = assertThrows(ExpressionEvaluationException.class, - () -> ExprValueUtils.fromObjectValue(integerValue(1))); + Exception exception = + assertThrows( + ExpressionEvaluationException.class, + () -> ExprValueUtils.fromObjectValue(integerValue(1))); assertEquals( - "unsupported object " - + "class org.opensearch.sql.data.model.ExprIntegerValue", + "unsupported object " + "class org.opensearch.sql.data.model.ExprIntegerValue", exception.getMessage()); } @@ -228,13 +234,14 @@ public void bindingTuples() { @Test public void constructDateAndTimeValue() { - assertEquals(new ExprDateValue("2012-07-07"), - ExprValueUtils.fromObjectValue("2012-07-07", DATE)); - assertEquals(new ExprTimeValue("01:01:01"), - ExprValueUtils.fromObjectValue("01:01:01", TIME)); - assertEquals(new ExprDatetimeValue("2012-07-07 01:01:01"), + assertEquals( + new ExprDateValue("2012-07-07"), ExprValueUtils.fromObjectValue("2012-07-07", DATE)); + assertEquals(new ExprTimeValue("01:01:01"), ExprValueUtils.fromObjectValue("01:01:01", TIME)); + assertEquals( + new ExprDatetimeValue("2012-07-07 01:01:01"), ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", DATETIME)); - assertEquals(new ExprTimestampValue("2012-07-07 01:01:01"), + assertEquals( + new ExprTimestampValue("2012-07-07 01:01:01"), ExprValueUtils.fromObjectValue("2012-07-07 01:01:01", TIMESTAMP)); } @@ -244,17 +251,20 @@ public void hashCodeTest() { assertEquals(new ExprShortValue(1).hashCode(), new ExprShortValue(1).hashCode()); assertEquals(new ExprIntegerValue(1).hashCode(), new ExprIntegerValue(1).hashCode()); assertEquals(new ExprStringValue("1").hashCode(), new ExprStringValue("1").hashCode()); - assertEquals(new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode(), + assertEquals( + new ExprCollectionValue(ImmutableList.of(new 
ExprIntegerValue(1))).hashCode(), new ExprCollectionValue(ImmutableList.of(new ExprIntegerValue(1))).hashCode()); - assertEquals(new ExprTupleValue(testTuple).hashCode(), - new ExprTupleValue(testTuple).hashCode()); - assertEquals(new ExprDateValue("2012-08-07").hashCode(), - new ExprDateValue("2012-08-07").hashCode()); - assertEquals(new ExprTimeValue("18:00:00").hashCode(), - new ExprTimeValue("18:00:00").hashCode()); - assertEquals(new ExprDatetimeValue("2012-08-07 18:00:00").hashCode(), + assertEquals( + new ExprTupleValue(testTuple).hashCode(), new ExprTupleValue(testTuple).hashCode()); + assertEquals( + new ExprDateValue("2012-08-07").hashCode(), new ExprDateValue("2012-08-07").hashCode()); + assertEquals( + new ExprTimeValue("18:00:00").hashCode(), new ExprTimeValue("18:00:00").hashCode()); + assertEquals( + new ExprDatetimeValue("2012-08-07 18:00:00").hashCode(), new ExprDatetimeValue("2012-08-07 18:00:00").hashCode()); - assertEquals(new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), + assertEquals( + new ExprTimestampValue("2012-08-07 18:00:00").hashCode(), new ExprTimestampValue("2012-08-07 18:00:00").hashCode()); } } diff --git a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java index 7db856d092..1def15cc6f 100644 --- a/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java +++ b/core/src/test/java/org/opensearch/sql/data/type/ExprTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.type; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java index ec00aebe18..cf9bb538c4 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/ExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -32,10 +31,8 @@ @ExtendWith(MockitoExtension.class) class ExprValueOrderingTest { - @Mock - ExprValue left; - @Mock - ExprValue right; + @Mock ExprValue left; + @Mock ExprValue right; @Test public void natural() { diff --git a/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java index c13f95ad63..97014fc410 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/NullsFirstExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java index ab36247089..3625dc3760 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/NullsLastExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git 
a/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java b/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java index 297079fca5..ad88eb2c76 100644 --- a/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java +++ b/core/src/test/java/org/opensearch/sql/data/utils/ReverseExprValueOrderingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.data.utils; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java index 0a9e42e2cd..e2927a4b0c 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java @@ -151,7 +151,8 @@ public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { throw new RuntimeException(e); } - if (updateResponse.getResult().equals(DocWriteResponse.Result.UPDATED)) { + if (updateResponse.getResult().equals(DocWriteResponse.Result.UPDATED) + || updateResponse.getResult().equals(DocWriteResponse.Result.NOOP)) { LOG.debug("DatasourceMetadata : {} successfully updated", dataSourceMetadata.getName()); } else { throw new RuntimeException("Saving dataSource metadata information failed with result : " diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java index bab568bc32..b58ef3ea1e 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java @@ -473,6 +473,24 @@ public void testUpdateDataSourceMetadata() { } + @Test + public void testUpdateDataSourceMetadataWithNOOP() { + Mockito.when(encryptor.encrypt("secret_key")).thenReturn("secret_key"); + Mockito.when(encryptor.encrypt("access_key")).thenReturn("access_key"); + Mockito.when(client.update(ArgumentMatchers.any())).thenReturn(updateResponseActionFuture); + Mockito.when(updateResponseActionFuture.actionGet()).thenReturn(updateResponse); + Mockito.when(updateResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP); + DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); + + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata(dataSourceMetadata); + + Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); + Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); + Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); + Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); + Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); + } + @Test public void testUpdateDataSourceMetadataWithNotFoundResult() { Mockito.when(encryptor.encrypt("secret_key")).thenReturn("secret_key"); diff --git a/doctest/test_data/nested_objects.json b/doctest/test_data/nested_objects.json index fc5f56b4c5..bee976806e 100644 --- a/doctest/test_data/nested_objects.json +++ b/doctest/test_data/nested_objects.json @@ -1,4 +1,2 @@ 
-{"index":{"_id":"1"}} {"message":{"info":"a","author":"e","dayOfWeek":1},"comment":{"data":"ab","likes":3},"myNum":1,"someField":"b"} -{"index":{"_id":"2"}} {"message":{"info":"b","author":"f","dayOfWeek":2},"comment":{"data":"aa","likes":2},"myNum":2,"someField":"a"} diff --git a/doctest/test_data/wildcard.json b/doctest/test_data/wildcard.json index c91778d8ab..21256ed8dc 100644 --- a/doctest/test_data/wildcard.json +++ b/doctest/test_data/wildcard.json @@ -1,22 +1,11 @@ -{"index":{"_id":"0"}} {"Body":"test wildcard"} -{"index":{"_id":"1"}} {"Body":"test wildcard in the end of the text%"} -{"index":{"_id":"2"}} {"Body":"%test wildcard in the beginning of the text"} -{"index":{"_id":"3"}} {"Body":"test wildcard in % the middle of the text"} -{"index":{"_id":"4"}} {"Body":"test wildcard %% beside each other"} -{"index":{"_id":"5"}} {"Body":"test wildcard in the end of the text_"} -{"index":{"_id":"6"}} {"Body":"_test wildcard in the beginning of the text"} -{"index":{"_id":"7"}} {"Body":"test wildcard in _ the middle of the text"} -{"index":{"_id":"8"}} {"Body":"test wildcard __ beside each other"} -{"index":{"_id":"9"}} {"Body":"test backslash wildcard \\_"} -{"index":{"_id":"10"}} {"Body":"tEsT wIlDcArD sensitive cases"}