diff --git a/build.gradle b/build.gradle index 2bdc4865bb..2ab7abc42a 100644 --- a/build.gradle +++ b/build.gradle @@ -93,7 +93,9 @@ spotless { 'spark/**/*.java', 'plugin/**/*.java', 'ppl/**/*.java', - 'integ-test/**/*java' + 'integ-test/**/*java', + 'core/**/*.java', + 'opensearch/**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java index c6d44e2c23..993e092534 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -40,9 +39,7 @@ public class OpenSearchNodeClient implements OpenSearchClient { /** Node client provided by OpenSearch container. */ private final NodeClient client; - /** - * Constructor of OpenSearchNodeClient. - */ + /** Constructor of OpenSearchNodeClient. */ public OpenSearchNodeClient(NodeClient client) { this.client = client; } @@ -50,8 +47,8 @@ public OpenSearchNodeClient(NodeClient client) { @Override public boolean exists(String indexName) { try { - IndicesExistsResponse checkExistResponse = client.admin().indices() - .exists(new IndicesExistsRequest(indexName)).actionGet(); + IndicesExistsResponse checkExistResponse = + client.admin().indices().exists(new IndicesExistsRequest(indexName)).actionGet(); return checkExistResponse.isExists(); } catch (Exception e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exists", e); @@ -83,13 +80,12 @@ public void createIndex(String indexName, Map mappings) { @Override public Map getIndexMappings(String... indexExpression) { try { - GetMappingsResponse mappingsResponse = client.admin().indices() - .prepareGetMappings(indexExpression) - .setLocal(true) - .get(); - return mappingsResponse.mappings().entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, - cursor -> new IndexMapping(cursor.getValue()))); + GetMappingsResponse mappingsResponse = + client.admin().indices().prepareGetMappings(indexExpression).setLocal(true).get(); + return mappingsResponse.mappings().entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, cursor -> new IndexMapping(cursor.getValue()))); } catch (IndexNotFoundException e) { // Re-throw directly to be treated as client error finally throw e; @@ -127,15 +123,11 @@ public Map getIndexMaxResultWindows(String... indexExpression) } } - /** - * TODO: Scroll doesn't work for aggregation. Support aggregation later. - */ + /** TODO: Scroll doesn't work for aggregation. Support aggregation later. 
*/ @Override public OpenSearchResponse search(OpenSearchRequest request) { return request.search( - req -> client.search(req).actionGet(), - req -> client.searchScroll(req).actionGet() - ); + req -> client.search(req).actionGet(), req -> client.searchScroll(req).actionGet()); } /** @@ -145,13 +137,12 @@ public OpenSearchResponse search(OpenSearchRequest request) { */ @Override public List indices() { - final GetIndexResponse indexResponse = client.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get(); + final GetIndexResponse indexResponse = + client.admin().indices().prepareGetIndex().setLocal(true).get(); final Stream aliasStream = ImmutableList.copyOf(indexResponse.aliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); @@ -164,20 +155,20 @@ public List indices() { */ @Override public Map meta() { - return ImmutableMap.of(META_CLUSTER_NAME, - client.settings().get("cluster.name", "opensearch")); + return ImmutableMap.of(META_CLUSTER_NAME, client.settings().get("cluster.name", "opensearch")); } @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - client.prepareClearScroll().addScrollId(scrollId).get(); - } catch (Exception e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + client.prepareClearScroll().addScrollId(scrollId).get(); + } catch (Exception e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java index c27c4bbc30..b6106982a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -49,8 +48,7 @@ public class OpenSearchRestClient implements OpenSearchClient { @Override public boolean exists(String indexName) { try { - return client.indices().exists( - new GetIndexRequest(indexName), RequestOptions.DEFAULT); + return client.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exist", e); } @@ -59,8 +57,9 @@ public boolean exists(String indexName) { @Override public void createIndex(String indexName, Map mappings) { try { - client.indices().create( - new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); + client + .indices() + .create(new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to create index [" + indexName + "]", e); } @@ -80,27 +79,29 @@ public Map getIndexMappings(String... indexExpression) { @Override public Map getIndexMaxResultWindows(String... 
indexExpression) { - GetSettingsRequest request = new GetSettingsRequest() - .indices(indexExpression).includeDefaults(true); + GetSettingsRequest request = + new GetSettingsRequest().indices(indexExpression).includeDefaults(true); try { GetSettingsResponse response = client.indices().getSettings(request, RequestOptions.DEFAULT); Map settings = response.getIndexToSettings(); Map defaultSettings = response.getIndexToDefaultSettings(); Map result = new HashMap<>(); - defaultSettings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); - - settings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); + defaultSettings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); + + settings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); return result; } catch (IOException e) { @@ -126,8 +127,7 @@ public OpenSearchResponse search(OpenSearchRequest request) { throw new IllegalStateException( "Failed to perform scroll operation with request " + req, e); } - } - ); + }); } /** @@ -142,7 +142,8 @@ public List indices() { client.indices().get(new GetIndexRequest(), RequestOptions.DEFAULT); final Stream aliasStream = ImmutableList.copyOf(indexResponse.getAliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); } catch (IOException e) { @@ -173,16 +174,17 @@ public Map meta() { @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - ClearScrollRequest clearRequest = new ClearScrollRequest(); - clearRequest.addScrollId(scrollId); - client.clearScroll(clearRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + ClearScrollRequest clearRequest = new ClearScrollRequest(); + clearRequest.addScrollId(scrollId); + client.clearScroll(clearRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java index c2428a59a8..75137973c5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of a geo_point value. See - * doc + * The type of a geo_point value. 
See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchGeoPointType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java index fccafc6caf..22581ec28c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of an ip value. See - * doc + * The type of an ip value. See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchIpType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java index 67b7296834..e7e453ca3f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java @@ -15,8 +15,8 @@ import org.opensearch.sql.data.type.ExprType; /** - * The type of a text value. See - * doc + * The type of text value. See doc */ public class OpenSearchTextType extends OpenSearchDataType { @@ -24,8 +24,7 @@ public class OpenSearchTextType extends OpenSearchDataType { // text could have fields // a read-only collection - @EqualsAndHashCode.Exclude - Map fields = ImmutableMap.of(); + @EqualsAndHashCode.Exclude Map fields = ImmutableMap.of(); private OpenSearchTextType() { super(MappingType.Text); @@ -34,6 +33,7 @@ private OpenSearchTextType() { /** * Constructs a Text Type using the passed in fields argument. + * * @param fields The fields to be used to construct the text type. * @return A new OpenSeachTextTypeObject */ @@ -67,7 +67,7 @@ protected OpenSearchDataType cloneEmpty() { } /** - * Text field doesn't have doc value (exception thrown even when you call "get") + * Text field doesn't have doc value (exception thrown even when you call "get")
* Limitation: assume inner field name is always "keyword". */ public static String convertTextToKeyword(String fieldName, ExprType fieldType) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java index 0c3d2aec45..0fbd2d4f98 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java @@ -10,12 +10,12 @@ import org.apache.commons.lang3.tuple.Pair; /** - * * Regardless the underling data format, the {@link Content} define the data in abstract manner. * which could be parsed by ElasticsearchExprValueFactory. There are two major use cases: + * *
 * <ol>
- *   <li>Represent the JSON data retrieve from OpenSearch search response.</li>
- *   <li>Represent the Object data extract from the OpenSearch aggregation response.</li>
+ *   <li>Represent the JSON data retrieve from OpenSearch search response.
+ *   <li>Represent the Object data extract from the OpenSearch aggregation response.
 * </ol>
*/ public interface Content { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java index 61da7c3b74..bdb15428e1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import com.fasterxml.jackson.databind.JsonNode; @@ -14,9 +13,7 @@ import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.Pair; -/** - * The Implementation of Content to represent {@link JsonNode}. - */ +/** The Implementation of Content to represent {@link JsonNode}. */ @RequiredArgsConstructor public class OpenSearchJsonContent implements Content { @@ -68,8 +65,7 @@ public Iterator> map() { final JsonNode mapValue = value(); mapValue .fieldNames() - .forEachRemaining( - field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); + .forEachRemaining(field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); return map.entrySet().iterator(); } @@ -133,33 +129,27 @@ public Pair geoValue() { lat = extractDoubleValue(value.get("lat")); } catch (Exception exception) { throw new IllegalStateException( - "latitude must be number value, but got value: " + value.get( - "lat")); + "latitude must be number value, but got value: " + value.get("lat")); } try { lon = extractDoubleValue(value.get("lon")); } catch (Exception exception) { throw new IllegalStateException( - "longitude must be number value, but got value: " + value.get( - "lon")); + "longitude must be number value, but got value: " + value.get("lon")); } return Pair.of(lat, lon); } else { - throw new IllegalStateException("geo point must in format of {\"lat\": number, \"lon\": " - + "number}"); + throw new IllegalStateException( + "geo point must in format of {\"lat\": number, \"lon\": number}"); } } - /** - * Getter for value. If value is array the whole array is returned. - */ + /** Getter for value. If value is array the whole array is returned. */ private JsonNode value() { return value; } - /** - * Get doubleValue from JsonNode if possible. - */ + /** Get doubleValue from JsonNode if possible. */ private Double extractDoubleValue(JsonNode node) { if (node.isTextual()) { return Double.valueOf(node.textValue()); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java index a17deb7e45..30b3784bfc 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import java.util.Objects; @@ -14,7 +13,7 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchIpType; /** - * OpenSearch IP ExprValue. + * OpenSearch IP ExprValue
* Todo, add this to avoid the unknown value type exception, the implementation will be changed. */ @RequiredArgsConstructor diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java index d093588168..fb696d6b04 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Expression Text Value, it is a extension of the ExprValue by OpenSearch. - */ +/** Expression Text Value, it is a extension of the ExprValue by OpenSearch. */ public class OpenSearchExprTextValue extends ExprStringValue { public OpenSearchExprTextValue(String value) { super(value); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 4e3e1ec5c0..22c2ece4a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -71,18 +70,15 @@ import org.opensearch.sql.opensearch.data.utils.OpenSearchJsonContent; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * Construct ExprValue from OpenSearch response. - */ +/** Construct ExprValue from OpenSearch response. */ public class OpenSearchExprValueFactory { - /** - * The Mapping of Field and ExprType. - */ + /** The Mapping of Field and ExprType. */ private final Map typeMapping; /** - * Extend existing mapping by new data without overwrite. - * Called from aggregation only {@see AggregationQueryBuilder#buildTypeMapping}. + * Extend existing mapping by new data without overwrite. Called from aggregation only {@see + * AggregationQueryBuilder#buildTypeMapping}. + * * @param typeMapping A data type mapping produced by aggregation. 
*/ public void extendTypeMapping(Map typeMapping) { @@ -95,9 +91,7 @@ public void extendTypeMapping(Map typeMapping) { } } - @Getter - @Setter - private OpenSearchAggregationResponseParser parser; + @Getter @Setter private OpenSearchAggregationResponseParser parser; private static final String TOP_PATH = ""; @@ -105,48 +99,62 @@ public void extendTypeMapping(Map typeMapping) { private static final Map> typeActionMap = new ImmutableMap.Builder>() - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), (c, dt) -> new ExprIntegerValue(c.intValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), (c, dt) -> new ExprLongValue(c.longValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), (c, dt) -> new ExprShortValue(c.shortValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), (c, dt) -> new ExprByteValue(c.byteValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), (c, dt) -> new ExprFloatValue(c.floatValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), (c, dt) -> new ExprDoubleValue(c.doubleValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), (c, dt) -> new OpenSearchExprTextValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), (c, dt) -> new ExprStringValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), (c, dt) -> ExprBooleanValue.of(c.booleanValue())) - //Handles the creation of DATE, TIME & DATETIME + // Handles the creation of DATE, TIME & DATETIME .put(OpenSearchDateType.of(TIME), OpenSearchExprValueFactory::createOpenSearchDateType) .put(OpenSearchDateType.of(DATE), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(TIMESTAMP), - OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(DATETIME), + .put( + OpenSearchDateType.of(TIMESTAMP), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), + .put( + OpenSearchDateType.of(DATETIME), OpenSearchExprValueFactory::createOpenSearchDateType) + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), (c, dt) -> new OpenSearchExprIpValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), - (c, dt) -> new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), - c.geoValue().getRight())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), + (c, dt) -> + new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), c.geoValue().getRight())) + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), (c, dt) -> new OpenSearchExprBinaryValue(c.stringValue())) .build(); - /** - * Constructor of OpenSearchExprValueFactory. 
- */ + /** Constructor of OpenSearchExprValueFactory. */ public OpenSearchExprValueFactory(Map typeMapping) { this.typeMapping = OpenSearchDataType.traverseAndFlatten(typeMapping); } /** + * + * + *
    * The struct construction has the following assumption:
    *  1. The field has OpenSearch Object data type.
    *     See 
@@ -155,19 +163,23 @@ public OpenSearchExprValueFactory(Map typeMapping) {
   *  2. The deeper field is flattened in the typeMapping. e.g.
    *     { "employ",       "STRUCT"  }
    *     { "employ.id",    "INTEGER" }
    *     { "employ.state", "STRING"  }
+   *  
*/ public ExprValue construct(String jsonString, boolean supportArrays) { try { - return parse(new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), TOP_PATH, - Optional.of(STRUCT), supportArrays); + return parse( + new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), + TOP_PATH, + Optional.of(STRUCT), + supportArrays); } catch (JsonProcessingException e) { throw new IllegalStateException(String.format("invalid json: %s.", jsonString), e); } } /** - * Construct ExprValue from field and its value object. Throw exception if trying - * to construct from field of unsupported type. + * Construct ExprValue from field and its value object. Throw exception if trying to construct + * from field of unsupported type.
* Todo, add IP, GeoPoint support after we have function implementation around it. * * @param field field name @@ -179,11 +191,7 @@ public ExprValue construct(String field, Object value, boolean supportArrays) { } private ExprValue parse( - Content content, - String field, - Optional fieldType, - boolean supportArrays - ) { + Content content, String field, Optional fieldType, boolean supportArrays) { if (content.isNull() || !fieldType.isPresent()) { return ExprNullValue.of(); } @@ -207,16 +215,16 @@ private ExprValue parse( } /** - * In OpenSearch, it is possible field doesn't have type definition in mapping. - * but has empty value. For example, {"empty_field": []}. + * In OpenSearch, it is possible field doesn't have type definition in mapping. but has empty + * value. For example, {"empty_field": []}. */ private Optional type(String field) { return Optional.ofNullable(typeMapping.get(field)); } /** - * Parse value with the first matching formatter into {@link ExprValue} - * with corresponding {@link ExprCoreType}. + * Parse value with the first matching formatter into {@link ExprValue} with corresponding {@link + * ExprCoreType}. * * @param value - time as string * @param dataType - field data type @@ -232,12 +240,12 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da TemporalAccessor accessor = formatter.parse(value); ZonedDateTime zonedDateTime = DateFormatters.from(accessor); switch (returnFormat) { - case TIME: return new ExprTimeValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); - case DATE: return new ExprDateValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); - default: return new ExprTimestampValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); + case TIME: + return new ExprTimeValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); + case DATE: + return new ExprDateValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); + default: + return new ExprTimestampValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); } } catch (IllegalArgumentException ignored) { // nothing to do, try another format @@ -247,19 +255,22 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da // if no formatters are available, try the default formatter try { switch (returnFormat) { - case TIME: return new ExprTimeValue( - DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); - case DATE: return new ExprDateValue( - DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); - default: return new ExprTimestampValue( - DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); + case TIME: + return new ExprTimeValue( + DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); + case DATE: + return new ExprDateValue( + DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); + default: + return new ExprTimestampValue( + DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); } } catch (DateTimeParseException ignored) { // ignored } - throw new IllegalArgumentException(String.format( - "Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); + throw new IllegalArgumentException( + String.format("Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); } private static ExprValue createOpenSearchDateType(Content value, ExprType type) { @@ -270,8 +281,8 @@ private static ExprValue 
createOpenSearchDateType(Content value, ExprType type) var numFormatters = dt.getNumericNamedFormatters(); if (numFormatters.size() > 0 || !dt.hasFormats()) { long epochMillis = 0; - if (numFormatters.contains(DateFormatter.forPattern( - FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { + if (numFormatters.contains( + DateFormatter.forPattern(FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { // no CamelCase for `EPOCH_*` formats epochMillis = value.longValue() * 1000; } else /* EPOCH_MILLIS */ { @@ -279,9 +290,12 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) } Instant instant = Instant.ofEpochMilli(epochMillis); switch ((ExprCoreType) returnFormat) { - case TIME: return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); - case DATE: return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); - default: return new ExprTimestampValue(instant); + case TIME: + return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); + case DATE: + return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); + default: + return new ExprTimestampValue(instant); } } else { // custom format @@ -297,6 +311,7 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) /** * Parse struct content. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param supportArrays Parsing the whole array if array is type nested. @@ -304,15 +319,23 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) */ private ExprValue parseStruct(Content content, String prefix, boolean supportArrays) { LinkedHashMap result = new LinkedHashMap<>(); - content.map().forEachRemaining(entry -> result.put(entry.getKey(), - parse(entry.getValue(), - makeField(prefix, entry.getKey()), - type(makeField(prefix, entry.getKey())), supportArrays))); + content + .map() + .forEachRemaining( + entry -> + result.put( + entry.getKey(), + parse( + entry.getValue(), + makeField(prefix, entry.getKey()), + type(makeField(prefix, entry.getKey())), + supportArrays))); return new ExprTupleValue(result); } /** * Parse array content. Can also parse nested which isn't necessarily an array. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param type Type of content parsing. @@ -320,32 +343,31 @@ private ExprValue parseStruct(Content content, String prefix, boolean supportArr * @return Value parsed from content. */ private ExprValue parseArray( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { List result = new ArrayList<>(); // ARRAY is mapped to nested but can take the json structure of an Object. if (content.objectValue() instanceof ObjectNode) { result.add(parseStruct(content, prefix, supportArrays)); // non-object type arrays are only supported when parsing inner_hits of OS response. 
- } else if ( - !(type instanceof OpenSearchDataType + } else if (!(type instanceof OpenSearchDataType && ((OpenSearchDataType) type).getExprType().equals(ARRAY)) && !supportArrays) { return parseInnerArrayValue(content.array().next(), prefix, type, supportArrays); } else { - content.array().forEachRemaining(v -> { - result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); - }); + content + .array() + .forEachRemaining( + v -> { + result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); + }); } return new ExprCollectionValue(result); } /** * Parse inner array value. Can be object type and recurse continues. + * * @param content Array index being parsed. * @param prefix Prefix for value. * @param type Type of inner array value. @@ -353,11 +375,7 @@ private ExprValue parseArray( * @return Inner array value. */ private ExprValue parseInnerArrayValue( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { if (type instanceof OpenSearchIpType || type instanceof OpenSearchBinaryType || type instanceof OpenSearchDateType @@ -382,6 +400,7 @@ private ExprValue parseInnerArrayValue( /** * Make complete path string for field. + * * @param path Path of field. * @param field Field to append to path. * @return Field appended to path level. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java index 9c6fcdb825..dbe91dc398 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java @@ -18,9 +18,7 @@ import org.opensearch.sql.executor.execution.AbstractPlan; import org.opensearch.threadpool.ThreadPool; -/** - * QueryManager implemented in OpenSearch cluster. - */ +/** QueryManager implemented in OpenSearch cluster. */ @RequiredArgsConstructor public class OpenSearchQueryManager implements QueryManager { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java index 4c02affc5e..e3bc48ba72 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import java.io.IOException; @@ -19,36 +18,23 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * A PhysicalPlan which will run the delegate plan in resource protection manner. - */ +/** A PhysicalPlan which will run the delegate plan in resource protection manner. */ @ToString @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class ResourceMonitorPlan extends PhysicalPlan implements SerializablePlan { - /** - * How many method calls to delegate's next() to perform resource check once. - */ + /** How many method calls to delegate's next() to perform resource check once. */ public static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000; - /** - * Delegated PhysicalPlan. - */ + /** Delegated PhysicalPlan. 
*/ private final PhysicalPlan delegate; - /** - * ResourceMonitor. - */ - @ToString.Exclude - private final ResourceMonitor monitor; - - /** - * Count how many calls to delegate's next() already. - */ - @EqualsAndHashCode.Exclude - private long nextCallCount = 0L; + /** ResourceMonitor. */ + @ToString.Exclude private final ResourceMonitor monitor; + /** Count how many calls to delegate's next() already. */ + @EqualsAndHashCode.Exclude private long nextCallCount = 0L; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java index c0a4aeb0b7..4b7b6c5dcb 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import com.google.common.annotations.VisibleForTesting; @@ -11,9 +10,7 @@ import lombok.NoArgsConstructor; import lombok.extern.log4j.Log4j2; -/** - * OpenSearch Memory Monitor. - */ +/** OpenSearch Memory Monitor. */ @Log4j2 public class OpenSearchMemoryHealthy { private final RandomFail randomFail; @@ -25,16 +22,12 @@ public OpenSearchMemoryHealthy() { } @VisibleForTesting - public OpenSearchMemoryHealthy( - RandomFail randomFail, - MemoryUsage memoryUsage) { + public OpenSearchMemoryHealthy(RandomFail randomFail, MemoryUsage memoryUsage) { this.randomFail = randomFail; this.memoryUsage = memoryUsage; } - /** - * Is Memory Healthy. Calculate based on the current heap memory usage. - */ + /** Is Memory Healthy. Calculate based on the current heap memory usage. */ public boolean isMemoryHealthy(long limitBytes) { final long memoryUsage = this.memoryUsage.usage(); log.debug("Memory usage:{}, limit:{}", memoryUsage, limitBytes); @@ -66,12 +59,8 @@ public long usage() { } @NoArgsConstructor - public static class MemoryUsageExceedFastFailureException extends RuntimeException { - - } + public static class MemoryUsageExceedFastFailureException extends RuntimeException {} @NoArgsConstructor - public static class MemoryUsageExceedException extends RuntimeException { - - } + public static class MemoryUsageExceedException extends RuntimeException {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java index 5ed82c7a5d..3990fef7b7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import io.github.resilience4j.core.IntervalFunction; @@ -17,7 +16,7 @@ /** * {@link ResourceMonitor} implementation on Elasticsearch. When the heap memory usage exceeds - * certain threshold, the monitor is not healthy. + * certain threshold, the monitor is not healthy.
* Todo, add metrics. */ @Log4j2 @@ -26,20 +25,15 @@ public class OpenSearchResourceMonitor extends ResourceMonitor { private final Retry retry; private final OpenSearchMemoryHealthy memoryMonitor; - /** - * Constructor of ElasticsearchCircuitBreaker. - */ - public OpenSearchResourceMonitor( - Settings settings, - OpenSearchMemoryHealthy memoryMonitor) { + /** Constructor. */ + public OpenSearchResourceMonitor(Settings settings, OpenSearchMemoryHealthy memoryMonitor) { this.settings = settings; RetryConfig config = RetryConfig.custom() .maxAttempts(3) .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(1000)) .retryExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) - .ignoreExceptions( - OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) + .ignoreExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) .build(); retry = Retry.of("mem", config); this.memoryMonitor = memoryMonitor; @@ -55,9 +49,7 @@ public boolean isHealthy() { try { ByteSizeValue limit = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); Supplier booleanSupplier = - Retry.decorateSupplier(retry, - () -> memoryMonitor - .isMemoryHealthy(limit.getBytes())); + Retry.decorateSupplier(retry, () -> memoryMonitor.isMemoryHealthy(limit.getBytes())); return booleanSupplier.get(); } catch (Exception e) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java index 919596eee2..6447a3ff65 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -33,49 +32,31 @@ @ToString public class OpenSearchQueryRequest implements OpenSearchRequest { - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * OpenSearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + /** List of includes expected in the response. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; - /** - * List of includes expected in the response. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; - - /** - * Indicate the search already done. - */ + /** Indicate the search already done. */ private boolean searchDone = false; - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(String indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + String indexName, int size, OpenSearchExprValueFactory factory, List includes) { this(new IndexName(indexName), size, factory, includes); } - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(IndexName indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. 
*/ + public OpenSearchQueryRequest( + IndexName indexName, int size, OpenSearchExprValueFactory factory, List includes) { this.indexName = indexName; this.sourceBuilder = new SearchSourceBuilder(); sourceBuilder.from(0); @@ -85,11 +66,12 @@ public OpenSearchQueryRequest(IndexName indexName, int size, this.includes = includes; } - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + IndexName indexName, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory factory, + List includes) { this.indexName = indexName; this.sourceBuilder = sourceBuilder; this.exprValueFactory = factory; @@ -97,22 +79,24 @@ public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBui } @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { if (searchDone) { return new OpenSearchResponse(SearchHits.empty(), exprValueFactory, includes); } else { searchDone = true; return new OpenSearchResponse( - searchAction.apply(new SearchRequest() - .indices(indexName.getIndexNames()) - .source(sourceBuilder)), exprValueFactory, includes); + searchAction.apply( + new SearchRequest().indices(indexName.getIndexNames()).source(sourceBuilder)), + exprValueFactory, + includes); } } @Override public void clean(Consumer cleanAction) { - //do nothing. + // do nothing. } @Override @@ -122,7 +106,7 @@ public boolean hasAnotherBatch() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("OpenSearchQueryRequest serialization " - + "is not implemented."); + throw new UnsupportedOperationException( + "OpenSearchQueryRequest serialization is not implemented."); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java index 5c9d0033c1..f775d55296 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -20,14 +19,10 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.OpenSearchResponse; -/** - * OpenSearch search request. - */ +/** OpenSearch search request. */ public interface OpenSearchRequest extends Writeable { - /** - * Default query timeout in minutes. - */ + /** Default query timeout in minutes. */ TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); /** @@ -37,8 +32,9 @@ public interface OpenSearchRequest extends Writeable { * @param scrollAction scroll search action. * @return OpenSearchResponse. */ - OpenSearchResponse search(Function searchAction, - Function scrollAction); + OpenSearchResponse search( + Function searchAction, + Function scrollAction); /** * Apply the cleanAction on request. @@ -49,21 +45,20 @@ OpenSearchResponse search(Function searchAction, /** * Get the OpenSearchExprValueFactory. + * * @return OpenSearchExprValueFactory. 
*/ OpenSearchExprValueFactory getExprValueFactory(); /** * Check if there is more data to get from OpenSearch. - * @return True if calling {@ref OpenSearchClient.search} with this request will - * return non-empty response. + * + * @return True if calling {@ref OpenSearchClient.search} with this request will return non-empty + * response. */ boolean hasAnotherBatch(); - /** - * OpenSearch Index Name. - * Indices are separated by ",". - */ + /** OpenSearch Index Name. Indices are separated by ",". */ @EqualsAndHashCode class IndexName implements Writeable { private static final String COMMA = ","; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java index 80259f15d3..1df3dcb183 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static java.util.stream.Collectors.mapping; @@ -47,47 +46,36 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * OpenSearch search request builder. - */ +/** OpenSearch search request builder. */ @EqualsAndHashCode @Getter @ToString public class OpenSearchRequestBuilder { - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * Query size of the request -- how many rows will be returned. - */ + /** Query size of the request -- how many rows will be returned. */ private int requestedTotalSize; - /** - * Size of each page request to return. - */ + /** Size of each page request to return. */ private Integer pageSize = null; - /** - * OpenSearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + private int startFrom = 0; - /** - * Constructor. - */ - public OpenSearchRequestBuilder(int requestedTotalSize, - OpenSearchExprValueFactory exprValueFactory) { + /** Constructor. */ + public OpenSearchRequestBuilder( + int requestedTotalSize, OpenSearchExprValueFactory exprValueFactory) { this.requestedTotalSize = requestedTotalSize; - this.sourceBuilder = new SearchSourceBuilder() - .from(startFrom) - .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) - .trackScores(false); + this.sourceBuilder = + new SearchSourceBuilder() + .from(startFrom) + .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) + .trackScores(false); this.exprValueFactory = exprValueFactory; } @@ -96,13 +84,11 @@ public OpenSearchRequestBuilder(int requestedTotalSize, * * @return query request or scroll request */ - public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, - int maxResultWindow, TimeValue scrollTimeout) { + public OpenSearchRequest build( + OpenSearchRequest.IndexName indexName, int maxResultWindow, TimeValue scrollTimeout) { int size = requestedTotalSize; FetchSourceContext fetchSource = this.sourceBuilder.fetchSource(); - List includes = fetchSource != null - ? Arrays.asList(fetchSource.includes()) - : List.of(); + List includes = fetchSource != null ? 
Arrays.asList(fetchSource.includes()) : List.of(); if (pageSize == null) { if (startFrom + size > maxResultWindow) { sourceBuilder.size(maxResultWindow - startFrom); @@ -118,12 +104,11 @@ public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, throw new UnsupportedOperationException("Non-zero offset is not supported with pagination"); } sourceBuilder.size(pageSize); - return new OpenSearchScrollRequest(indexName, scrollTimeout, - sourceBuilder, exprValueFactory, includes); + return new OpenSearchScrollRequest( + indexName, scrollTimeout, sourceBuilder, exprValueFactory, includes); } } - boolean isBoolFilterQuery(QueryBuilder current) { return (current instanceof BoolQueryBuilder); } @@ -131,7 +116,7 @@ boolean isBoolFilterQuery(QueryBuilder current) { /** * Push down query to DSL request. * - * @param query query request + * @param query query request */ public void pushDownFilter(QueryBuilder query) { QueryBuilder current = sourceBuilder.query(); @@ -142,9 +127,7 @@ public void pushDownFilter(QueryBuilder query) { if (isBoolFilterQuery(current)) { ((BoolQueryBuilder) current).filter(query); } else { - sourceBuilder.query(QueryBuilders.boolQuery() - .filter(current) - .filter(query)); + sourceBuilder.query(QueryBuilders.boolQuery().filter(current).filter(query)); } } @@ -181,9 +164,7 @@ public void pushDownSort(List> sortBuilders) { } } - /** - * Pushdown size (limit) and from (offset) to DSL request. - */ + /** Pushdown size (limit) and from (offset) to DSL request. */ public void pushDownLimit(Integer limit, Integer offset) { requestedTotalSize = limit; startFrom = offset; @@ -200,6 +181,7 @@ public void pushDownPageSize(int pageSize) { /** * Add highlight to DSL requests. + * * @param field name of the field to highlight */ public void pushDownHighlight(String field, Map arguments) { @@ -208,32 +190,34 @@ public void pushDownHighlight(String field, Map arguments) { // OS does not allow duplicates of highlight fields if (sourceBuilder.highlighter().fields().stream() .anyMatch(f -> f.name().equals(unquotedField))) { - throw new SemanticCheckException(String.format( - "Duplicate field %s in highlight", field)); + throw new SemanticCheckException(String.format("Duplicate field %s in highlight", field)); } sourceBuilder.highlighter().field(unquotedField); } else { - HighlightBuilder highlightBuilder = - new HighlightBuilder().field(unquotedField); + HighlightBuilder highlightBuilder = new HighlightBuilder().field(unquotedField); sourceBuilder.highlighter(highlightBuilder); } // lastFieldIndex denotes previously set highlighter with field parameter int lastFieldIndex = sourceBuilder.highlighter().fields().size() - 1; if (arguments.containsKey("pre_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .preTags(arguments.get("pre_tags").toString()); } if (arguments.containsKey("post_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .postTags(arguments.get("post_tags").toString()); } } - /** - * Push down project list to DSL requests. - */ + /** Push down project list to DSL requests. */ public void pushDownProjects(Set projects) { sourceBuilder.fetchSource( projects.stream().map(ReferenceExpression::getAttr).distinct().toArray(String[]::new), @@ -254,21 +238,22 @@ private boolean isSortByDocOnly() { /** * Push down nested to sourceBuilder. + * * @param nestedArgs : Nested arguments to push down. 
*/ public void pushDownNested(List> nestedArgs) { initBoolQueryFilter(); List nestedQueries = extractNestedQueries(query()); - groupFieldNamesByPath(nestedArgs).forEach( - (path, fieldNames) -> - buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path)) - ); + groupFieldNamesByPath(nestedArgs) + .forEach( + (path, fieldNames) -> + buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path))); } /** - * InnerHit must be added to the NestedQueryBuilder. We need to extract - * the nested queries currently in the query if there is already a filter - * push down with nested query. + * InnerHit must be added to the NestedQueryBuilder. We need to extract the nested queries + * currently in the query if there is already a filter push down with nested query. + * * @param query : current query. * @return : grouped nested queries currently in query. */ @@ -289,9 +274,7 @@ public int getMaxResponseSize() { return pageSize == null ? requestedTotalSize : pageSize; } - /** - * Initialize bool query for push down. - */ + /** Initialize bool query for push down. */ private void initBoolQueryFilter() { if (sourceBuilder.query() == null) { sourceBuilder.query(QueryBuilders.boolQuery()); @@ -304,44 +287,42 @@ private void initBoolQueryFilter() { /** * Map all field names in nested queries that use same path. + * * @param fields : Fields for nested queries. * @return : Map of path and associated field names. */ private Map> groupFieldNamesByPath( List> fields) { // TODO filter out reverse nested when supported - .filter(not(isReverseNested())) - return fields.stream().collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping( - m -> m.get("field").toString(), - toList() - ) - ) - ); + return fields.stream() + .collect( + Collectors.groupingBy( + m -> m.get("path").toString(), mapping(m -> m.get("field").toString(), toList()))); } /** * Build inner hits portion to nested query. + * * @param paths : Set of all paths used in nested queries. * @param query : Current pushDown query. */ private void buildInnerHit(List paths, NestedQueryBuilder query) { - query.innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, paths.toArray(new String[0]), null) - )); + query.innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, paths.toArray(new String[0]), null))); } /** - * We need to group nested queries with same path for adding new fields with same path of - * inner hits. If we try to add additional inner hits with same path we get an OS error. + * We need to group nested queries with same path for adding new fields with same path of inner + * hits. If we try to add additional inner hits with same path we get an OS error. + * * @param nestedQueries Current list of nested queries in query. * @param path path comparing with current nested queries. * @return Query with same path or new empty nested query. */ private NestedQueryBuilder findNestedQueryWithSamePath( - List nestedQueries, String path - ) { + List nestedQueries, String path) { return nestedQueries.stream() .filter(query -> isSamePath(path, query)) .findAny() @@ -350,6 +331,7 @@ private NestedQueryBuilder findNestedQueryWithSamePath( /** * Check if is nested query is of the same path value. + * * @param path Value of path to compare with nested query. * @param query nested query builder to compare with path. * @return true if nested query has same path. 
@@ -358,9 +340,7 @@ private boolean isSamePath(String path, NestedQueryBuilder query) { return nestedQuery(path, query.query(), query.scoreMode()).equals(query); } - /** - * Create a nested query with match all filter to place inner hits. - */ + /** Create a nested query with match all filter to place inner hits. */ private Supplier createEmptyNestedQuery(String path) { return () -> { NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); @@ -371,6 +351,7 @@ private Supplier createEmptyNestedQuery(String path) { /** * Return current query. + * * @return : Current source builder query. */ private BoolQueryBuilder query() { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java index 34e8fcd096..c9490f0767 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -41,62 +40,56 @@ public class OpenSearchScrollRequest implements OpenSearchRequest { /** * Search request used to initiate paged (scrolled) search. Not needed to get subsequent pages. */ - @EqualsAndHashCode.Exclude - private final transient SearchRequest initialSearchRequest; + @EqualsAndHashCode.Exclude private final transient SearchRequest initialSearchRequest; + /** Scroll context timeout. */ private final TimeValue scrollTimeout; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; /** Index name. */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; /** * Scroll id which is set after first request issued. Because OpenSearchClient is shared by * multiple threads so this state has to be maintained here. */ - @Setter - @Getter - private String scrollId = NO_SCROLL_ID; + @Setter @Getter private String scrollId = NO_SCROLL_ID; public static final String NO_SCROLL_ID = ""; - @EqualsAndHashCode.Exclude - private boolean needClean = true; + @EqualsAndHashCode.Exclude private boolean needClean = true; - @Getter - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; + @Getter @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; /** Constructor. */ - public OpenSearchScrollRequest(IndexName indexName, - TimeValue scrollTimeout, - SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + public OpenSearchScrollRequest( + IndexName indexName, + TimeValue scrollTimeout, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.indexName = indexName; this.scrollTimeout = scrollTimeout; this.exprValueFactory = exprValueFactory; - this.initialSearchRequest = new SearchRequest() - .indices(indexName.getIndexNames()) - .scroll(scrollTimeout) - .source(sourceBuilder); + this.initialSearchRequest = + new SearchRequest() + .indices(indexName.getIndexNames()) + .scroll(scrollTimeout) + .source(sourceBuilder); this.includes = includes; } - - /** Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. 
+ /** + * Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. */ @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { SearchResponse openSearchResponse; if (isScroll()) { openSearchResponse = scrollAction.apply(scrollRequest()); @@ -172,6 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { /** * Constructs OpenSearchScrollRequest from serialized representation. + * * @param in stream to read data from. * @param engine OpenSearchSqlEngine to get node-specific context. * @throws IOException thrown if reading from input {@code in} fails. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java index a2fbf79624..2969c7639b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * OpenSearch system request query against the system index. - */ +/** OpenSearch system request query against the system index. */ public interface OpenSearchSystemRequest { /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java index 03abfbf6c1..e43777a740 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static org.opensearch.sql.opensearch.storage.OpenSearchIndex.METADATAFIELD_TYPE_MAP; @@ -35,52 +34,37 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; -/** - * OpenSearch search response. - */ +/** OpenSearch search response. */ @EqualsAndHashCode @ToString public class OpenSearchResponse implements Iterable { - /** - * Search query result (non-aggregation). - */ + /** Search query result (non-aggregation). */ private final SearchHits hits; - /** - * Search aggregation result. - */ + /** Search aggregation result. */ private final Aggregations aggregations; - /** - * List of requested include fields. - */ + /** List of requested include fields. */ private final List includes; - /** - * OpenSearchExprValueFactory used to build ExprValue from search result. - */ - @EqualsAndHashCode.Exclude - private final OpenSearchExprValueFactory exprValueFactory; + /** OpenSearchExprValueFactory used to build ExprValue from search result. */ + @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory exprValueFactory; - /** - * Constructor of OpenSearchResponse. - */ - public OpenSearchResponse(SearchResponse searchResponse, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse. 
*/ + public OpenSearchResponse( + SearchResponse searchResponse, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.hits = searchResponse.getHits(); this.aggregations = searchResponse.getAggregations(); this.exprValueFactory = exprValueFactory; this.includes = includes; } - /** - * Constructor of OpenSearchResponse with SearchHits. - */ - public OpenSearchResponse(SearchHits hits, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse with SearchHits. */ + public OpenSearchResponse( + SearchHits hits, OpenSearchExprValueFactory exprValueFactory, List includes) { this.hits = hits; this.aggregations = null; this.exprValueFactory = exprValueFactory; @@ -111,48 +95,52 @@ public Iterator iterator() { return handleAggregationResponse(); } else { return Arrays.stream(hits.getHits()) - .map(hit -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - addParsedHitsToBuilder(builder, hit); - addMetaDataFieldsToBuilder(builder, hit); - addHighlightsToBuilder(builder, hit); - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + .map( + hit -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + addParsedHitsToBuilder(builder, hit); + addMetaDataFieldsToBuilder(builder, hit); + addHighlightsToBuilder(builder, hit); + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } /** - * Parse response for all hits to add to builder. Inner_hits supports arrays of objects - * with nested type. + * Parse response for all hits to add to builder. Inner_hits supports arrays of objects with + * nested type. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addParsedHitsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { builder.putAll( - exprValueFactory.construct( - hit.getSourceAsString(), - !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty()) - ).tupleValue()); + exprValueFactory + .construct( + hit.getSourceAsString(), + !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty())) + .tupleValue()); } /** * If highlight fields are present in response add the fields to the builder. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addHighlightsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { if (!hit.getHighlightFields().isEmpty()) { var hlBuilder = ImmutableMap.builder(); for (var es : hit.getHighlightFields().entrySet()) { - hlBuilder.put(es.getKey(), ExprValueUtils.collectionValue( - Arrays.stream(es.getValue().fragments()).map( - Text::toString).collect(Collectors.toList()))); + hlBuilder.put( + es.getKey(), + ExprValueUtils.collectionValue( + Arrays.stream(es.getValue().fragments()) + .map(Text::toString) + .collect(Collectors.toList()))); } builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); } @@ -160,58 +148,56 @@ private void addHighlightsToBuilder( /** * Add metadata fields to builder from response. + * * @param builder builder to build values from response. * @param hit Search hit from response. 
*/ private void addMetaDataFieldsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { - List metaDataFieldSet = includes.stream() - .filter(METADATAFIELD_TYPE_MAP::containsKey) - .collect(Collectors.toList()); - ExprFloatValue maxScore = Float.isNaN(hits.getMaxScore()) - ? null : new ExprFloatValue(hits.getMaxScore()); - - metaDataFieldSet.forEach(metaDataField -> { - if (metaDataField.equals(METADATA_FIELD_INDEX)) { - builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); - } else if (metaDataField.equals(METADATA_FIELD_ID)) { - builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); - } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { - if (!Float.isNaN(hit.getScore())) { - builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); - } - } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { - if (maxScore != null) { - builder.put(METADATA_FIELD_MAXSCORE, maxScore); - } - } else if (metaDataField.equals(METADATA_FIELD_SORT)) { - builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); - } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ - builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); - } - }); + ImmutableMap.Builder builder, SearchHit hit) { + List metaDataFieldSet = + includes.stream().filter(METADATAFIELD_TYPE_MAP::containsKey).collect(Collectors.toList()); + ExprFloatValue maxScore = + Float.isNaN(hits.getMaxScore()) ? null : new ExprFloatValue(hits.getMaxScore()); + + metaDataFieldSet.forEach( + metaDataField -> { + if (metaDataField.equals(METADATA_FIELD_INDEX)) { + builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); + } else if (metaDataField.equals(METADATA_FIELD_ID)) { + builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); + } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { + if (!Float.isNaN(hit.getScore())) { + builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); + } + } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { + if (maxScore != null) { + builder.put(METADATA_FIELD_MAXSCORE, maxScore); + } + } else if (metaDataField.equals(METADATA_FIELD_SORT)) { + builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); + } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ + builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); + } + }); } /** * Handle an aggregation response. + * * @return Parsed and built return values from response. 
*/ private Iterator handleAggregationResponse() { - return exprValueFactory.getParser().parse(aggregations).stream().map(entry -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Map.Entry value : entry.entrySet()) { - builder.put( - value.getKey(), - exprValueFactory.construct( - value.getKey(), - value.getValue(), - false - ) - ); - } - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + return exprValueFactory.getParser().parse(aggregations).stream() + .map( + entry -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Map.Entry value : entry.entrySet()) { + builder.put( + value.getKey(), + exprValueFactory.construct(value.getKey(), value.getValue(), false)); + } + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java index 384e07ad8f..1492fedfc2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java @@ -23,9 +23,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation; -/** - * {@link NumericMetricsAggregation.SingleValue} metric parser. - */ +/** {@link NumericMetricsAggregation.SingleValue} metric parser. */ @EqualsAndHashCode @RequiredArgsConstructor public class SingleValueParser implements MetricParser { @@ -35,7 +33,6 @@ public class SingleValueParser implements MetricParser { @Override public Map parse(Aggregation agg) { return Collections.singletonMap( - agg.getName(), - handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); + agg.getName(), handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java index c80b75de05..82a2f8648f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java @@ -24,9 +24,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.ExtendedStats; -/** - * {@link ExtendedStats} metric parser. - */ +/** {@link ExtendedStats} metric parser. */ @EqualsAndHashCode @RequiredArgsConstructor public class StatsParser implements MetricParser { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java index a98e1b4ce3..b29b44f033 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.agg; import java.util.Arrays; @@ -16,21 +15,19 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.TopHits; -/** - * {@link TopHits} metric parser. - */ +/** {@link TopHits} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class TopHitsParser implements MetricParser { - @Getter - private final String name; + @Getter private final String name; @Override public Map parse(Aggregation agg) { return Collections.singletonMap( agg.getName(), Arrays.stream(((TopHits) agg).getHits().getHits()) - .flatMap(h -> h.getSourceAsMap().values().stream()).collect(Collectors.toList())); + .flatMap(h -> h.getSourceAsMap().values().stream()) + .collect(Collectors.toList())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java index 953f4d19b4..9ce46c6de6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java @@ -19,6 +19,7 @@ public class Utils { /** * Utils to handle Nan/Infinite Value. + * * @return null if is Nan or is +-Infinity. */ public static Object handleNanInfValue(double value) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java index 0c1b2e58b1..95c52ea275 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.security; import java.security.AccessController; @@ -17,9 +16,7 @@ */ public class SecurityAccess { - /** - * Execute the operation in privileged mode. - */ + /** Execute the operation in privileged mode. */ public static T doPrivileged(final PrivilegedExceptionAction operation) { SpecialPermission.check(); try { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java index 0810312974..133903dabe 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.opensearch.common.settings.Settings.EMPTY; @@ -27,129 +26,172 @@ import org.opensearch.sql.common.setting.LegacySettings; import org.opensearch.sql.common.setting.Settings; -/** - * Setting implementation on OpenSearch. - */ +/** Setting implementation on OpenSearch. */ @Log4j2 public class OpenSearchSettings extends Settings { - /** - * Default settings. - */ + /** Default settings. */ private final Map> defaultSettings; - /** - * Latest setting value for each registered key. Thread-safe is required. - */ + + /** Latest setting value for each registered key. Thread-safe is required. 
*/ @VisibleForTesting private final Map latestSettings = new ConcurrentHashMap<>(); - public static final Setting SQL_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.SQL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_SLOWLOG_SETTING = Setting.intSetting( - Key.SQL_SLOWLOG.getKeyValue(), - LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = Setting.positiveTimeSetting( - Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), - LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_DELETE_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_DELETE_ENABLED.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting PPL_ENABLED_SETTING = Setting.boolSetting( - Key.PPL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.PPL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_MEMORY_LIMIT_SETTING = new Setting<>( - Key.QUERY_MEMORY_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, - (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio( - s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_SIZE_LIMIT_SETTING = Setting.intSetting( - Key.QUERY_SIZE_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_WINDOW_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_WINDOW.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, - 2L, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_INTERVAL_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_INTERVAL.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, - 1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting SQL_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.SQL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_SLOWLOG_SETTING = + Setting.intSetting( + Key.SQL_SLOWLOG.getKeyValue(), + LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = + Setting.positiveTimeSetting( + Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), + LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_DELETE_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_DELETE_ENABLED.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting PPL_ENABLED_SETTING = + Setting.boolSetting( + Key.PPL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.PPL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_MEMORY_LIMIT_SETTING = + new Setting<>( + Key.QUERY_MEMORY_LIMIT.getKeyValue(), + 
LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, + (s) -> + MemorySizeValue.parseBytesSizeValueOrHeapRatio( + s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_SIZE_LIMIT_SETTING = + Setting.intSetting( + Key.QUERY_SIZE_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting METRICS_ROLLING_WINDOW_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_WINDOW.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting METRICS_ROLLING_INTERVAL_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_INTERVAL.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); // we are keeping this to not break upgrades if the config is already present. // This will be completely removed in 3.0. - public static final Setting DATASOURCE_CONFIG = SecureSetting.secureFile( - "plugins.query.federation.datasources.config", - null, - Setting.Property.Deprecated); - - public static final Setting DATASOURCE_MASTER_SECRET_KEY = Setting.simpleString( - ENCYRPTION_MASTER_KEY.getKeyValue(), - Setting.Property.NodeScope, - Setting.Property.Final, - Setting.Property.Filtered); - - public static final Setting DATASOURCE_URI_ALLOW_HOSTS = Setting.simpleString( - Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), - ".*", - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting DATASOURCE_CONFIG = + SecureSetting.secureFile( + "plugins.query.federation.datasources.config", null, Setting.Property.Deprecated); - /** - * Construct OpenSearchSetting. - * The OpenSearchSetting must be singleton. - */ + public static final Setting DATASOURCE_MASTER_SECRET_KEY = + Setting.simpleString( + ENCYRPTION_MASTER_KEY.getKeyValue(), + Setting.Property.NodeScope, + Setting.Property.Final, + Setting.Property.Filtered); + + public static final Setting DATASOURCE_URI_ALLOW_HOSTS = + Setting.simpleString( + Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), + ".*", + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + /** Construct OpenSearchSetting. The OpenSearchSetting must be singleton. 
*/ @SuppressWarnings("unchecked") public OpenSearchSettings(ClusterSettings clusterSettings) { ImmutableMap.Builder> settingBuilder = new ImmutableMap.Builder<>(); - register(settingBuilder, clusterSettings, Key.SQL_ENABLED, - SQL_ENABLED_SETTING, new Updater(Key.SQL_ENABLED)); - register(settingBuilder, clusterSettings, Key.SQL_SLOWLOG, - SQL_SLOWLOG_SETTING, new Updater(Key.SQL_SLOWLOG)); - register(settingBuilder, clusterSettings, Key.SQL_CURSOR_KEEP_ALIVE, - SQL_CURSOR_KEEP_ALIVE_SETTING, new Updater(Key.SQL_CURSOR_KEEP_ALIVE)); - register(settingBuilder, clusterSettings, Key.SQL_DELETE_ENABLED, - SQL_DELETE_ENABLED_SETTING, new Updater(Key.SQL_DELETE_ENABLED)); - register(settingBuilder, clusterSettings, Key.PPL_ENABLED, - PPL_ENABLED_SETTING, new Updater(Key.PPL_ENABLED)); - register(settingBuilder, clusterSettings, Key.QUERY_MEMORY_LIMIT, - QUERY_MEMORY_LIMIT_SETTING, new Updater(Key.QUERY_MEMORY_LIMIT)); - register(settingBuilder, clusterSettings, Key.QUERY_SIZE_LIMIT, - QUERY_SIZE_LIMIT_SETTING, new Updater(Key.QUERY_SIZE_LIMIT)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_WINDOW, - METRICS_ROLLING_WINDOW_SETTING, new Updater(Key.METRICS_ROLLING_WINDOW)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_INTERVAL, - METRICS_ROLLING_INTERVAL_SETTING, new Updater(Key.METRICS_ROLLING_INTERVAL)); - register(settingBuilder, clusterSettings, Key.DATASOURCES_URI_ALLOWHOSTS, - DATASOURCE_URI_ALLOW_HOSTS, new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); - registerNonDynamicSettings(settingBuilder, clusterSettings, Key.CLUSTER_NAME, - ClusterName.CLUSTER_NAME_SETTING); + register( + settingBuilder, + clusterSettings, + Key.SQL_ENABLED, + SQL_ENABLED_SETTING, + new Updater(Key.SQL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.SQL_SLOWLOG, + SQL_SLOWLOG_SETTING, + new Updater(Key.SQL_SLOWLOG)); + register( + settingBuilder, + clusterSettings, + Key.SQL_CURSOR_KEEP_ALIVE, + SQL_CURSOR_KEEP_ALIVE_SETTING, + new Updater(Key.SQL_CURSOR_KEEP_ALIVE)); + register( + settingBuilder, + clusterSettings, + Key.SQL_DELETE_ENABLED, + SQL_DELETE_ENABLED_SETTING, + new Updater(Key.SQL_DELETE_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.PPL_ENABLED, + PPL_ENABLED_SETTING, + new Updater(Key.PPL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_MEMORY_LIMIT, + QUERY_MEMORY_LIMIT_SETTING, + new Updater(Key.QUERY_MEMORY_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_SIZE_LIMIT, + QUERY_SIZE_LIMIT_SETTING, + new Updater(Key.QUERY_SIZE_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_WINDOW, + METRICS_ROLLING_WINDOW_SETTING, + new Updater(Key.METRICS_ROLLING_WINDOW)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_INTERVAL, + METRICS_ROLLING_INTERVAL_SETTING, + new Updater(Key.METRICS_ROLLING_INTERVAL)); + register( + settingBuilder, + clusterSettings, + Key.DATASOURCES_URI_ALLOWHOSTS, + DATASOURCE_URI_ALLOW_HOSTS, + new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); + registerNonDynamicSettings( + settingBuilder, clusterSettings, Key.CLUSTER_NAME, ClusterName.CLUSTER_NAME_SETTING); defaultSettings = settingBuilder.build(); } @@ -159,36 +201,33 @@ public T getSettingValue(Settings.Key key) { return (T) latestSettings.getOrDefault(key, defaultSettings.get(key).getDefault(EMPTY)); } - /** - * Register the pair of {key, setting}. 
- */ - private void register(ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, - Setting setting, - Consumer updater) { + /** Register the pair of {key, setting}. */ + private void register( + ImmutableMap.Builder> settingBuilder, + ClusterSettings clusterSettings, + Settings.Key key, + Setting setting, + Consumer updater) { if (clusterSettings.get(setting) != null) { latestSettings.put(key, clusterSettings.get(setting)); } settingBuilder.put(key, setting); - clusterSettings - .addSettingsUpdateConsumer(setting, updater); + clusterSettings.addSettingsUpdateConsumer(setting, updater); } - /** - * Register Non Dynamic Settings without consumer. - */ + /** Register Non Dynamic Settings without consumer. */ private void registerNonDynamicSettings( ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, + ClusterSettings clusterSettings, + Settings.Key key, Setting setting) { settingBuilder.put(key, setting); latestSettings.put(key, clusterSettings.get(setting)); } - /** - * Add the inner class only for UT coverage purpose. - * Lambda could be much elegant solution. But which is hard to test. + * Add the inner class only for UT coverage purpose. Lambda could be much elegant solution. But + * which is hard to test. */ @VisibleForTesting @RequiredArgsConstructor @@ -202,9 +241,7 @@ public void accept(Object newValue) { } } - /** - * Used by Plugin to init Setting. - */ + /** Used by Plugin to init Setting. */ public static List> pluginSettings() { return new ImmutableList.Builder>() .add(SQL_ENABLED_SETTING) @@ -220,9 +257,7 @@ public static List> pluginSettings() { .build(); } - /** - * Init Non Dynamic Plugin Settings. - */ + /** Init Non Dynamic Plugin Settings. */ public static List> pluginNonDynamicSettings() { return new ImmutableList.Builder>() .add(DATASOURCE_MASTER_SECRET_KEY) @@ -230,9 +265,7 @@ public static List> pluginNonDynamicSettings() { .build(); } - /** - * Used by local cluster to get settings from a setting instance. - */ + /** Used by local cluster to get settings from a setting instance. 
*/ public List> getSettings() { return pluginSettings(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java index 62617f744e..c6afdb8511 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import com.google.common.annotations.VisibleForTesting; @@ -47,43 +46,33 @@ public class OpenSearchIndex implements Table { public static final String METADATA_FIELD_ROUTING = "_routing"; - public static final java.util.Map METADATAFIELD_TYPE_MAP = Map.of( - METADATA_FIELD_ID, ExprCoreType.STRING, - METADATA_FIELD_INDEX, ExprCoreType.STRING, - METADATA_FIELD_SCORE, ExprCoreType.FLOAT, - METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, - METADATA_FIELD_SORT, ExprCoreType.LONG, - METADATA_FIELD_ROUTING, ExprCoreType.STRING - ); + public static final java.util.Map METADATAFIELD_TYPE_MAP = + Map.of( + METADATA_FIELD_ID, ExprCoreType.STRING, + METADATA_FIELD_INDEX, ExprCoreType.STRING, + METADATA_FIELD_SCORE, ExprCoreType.FLOAT, + METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, + METADATA_FIELD_SORT, ExprCoreType.LONG, + METADATA_FIELD_ROUTING, ExprCoreType.STRING); /** OpenSearch client connection. */ private final OpenSearchClient client; private final Settings settings; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final OpenSearchRequest.IndexName indexName; - /** - * The cached mapping of field and type in index. - */ + /** The cached mapping of field and type in index. */ private Map cachedFieldOpenSearchTypes = null; - /** - * The cached ExprType of fields. - */ + /** The cached ExprType of fields. */ private Map cachedFieldTypes = null; - /** - * The cached max result window setting of index. - */ + /** The cached max result window setting of index. */ private Integer cachedMaxResultWindow = null; - /** - * Constructor. - */ + /** Constructor. */ public OpenSearchIndex(OpenSearchClient client, Settings settings, String indexName) { this.client = client; this.settings = settings; @@ -113,22 +102,24 @@ public void create(Map schema) { * or lazy evaluate when query engine pulls field type. */ /** - * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} - * it returns a flattened map. + * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} it returns a + * flattened map. + * * @return A map between field names and matching `ExprCoreType`s. 
*/ @Override public Map getFieldTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } if (cachedFieldTypes == null) { - cachedFieldTypes = OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes) - .entrySet().stream().collect( - LinkedHashMap::new, - (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), - Map::putAll); + cachedFieldTypes = + OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes).entrySet().stream() + .collect( + LinkedHashMap::new, + (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), + Map::putAll); } return cachedFieldTypes; } @@ -140,19 +131,18 @@ public Map getReservedFieldTypes() { /** * Get parsed mapping info. + * * @return A complete map between field names and their types. */ public Map getFieldOpenSearchTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } return cachedFieldOpenSearchTypes; } - /** - * Get the max result window setting of the table. - */ + /** Get the max result window setting of the table. */ public Integer getMaxResultWindow() { if (cachedMaxResultWindow == null) { cachedMaxResultWindow = @@ -161,9 +151,7 @@ public Integer getMaxResultWindow() { return cachedMaxResultWindow; } - /** - * TODO: Push down operations to index scan operator as much as possible in future. - */ + /** TODO: Push down operations to index scan operator as much as possible in future. */ @Override public PhysicalPlan implement(LogicalPlan plan) { // TODO: Leave it here to avoid impact Prometheus and AD operators. Need to move to Planner. 
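For context, a minimal sketch of the flattening idea behind the cached getFieldTypes() above: nested mapping "properties" are collapsed into dotted field paths. This is an assumed, simplified stand-in written against plain JDK collections, not the plugin's OpenSearchDataType.traverseAndFlatten; the example mapping is invented.

// Hypothetical illustration: flatten nested mapping properties into dotted paths.
import java.util.LinkedHashMap;
import java.util.Map;

public class MappingFlattenSketch {
  @SuppressWarnings("unchecked")
  static void flatten(String prefix, Map<String, Object> properties, Map<String, String> out) {
    properties.forEach(
        (name, def) -> {
          Map<String, Object> defMap = (Map<String, Object>) def;
          String path = prefix.isEmpty() ? name : prefix + "." + name;
          Object type = defMap.get("type");
          if (type != null) {
            out.put(path, type.toString()); // leaf field: record its type
          }
          Object children = defMap.get("properties");
          if (children != null) {
            flatten(path, (Map<String, Object>) children, out); // recurse into object fields
          }
        });
  }

  public static void main(String[] args) {
    Map<String, Object> properties =
        Map.of(
            "name", Map.of("type", "keyword"),
            "address", Map.of("properties", Map.of("city", Map.of("type", "text"))));
    Map<String, String> flattened = new LinkedHashMap<>();
    flatten("", properties, flattened);
    System.out.println(flattened); // e.g. {name=keyword, address.city=text}
  }
}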
@@ -175,12 +163,13 @@ public TableScanBuilder createScanBuilder() { final int querySizeLimit = settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT); final TimeValue cursorKeepAlive = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - var builder = new OpenSearchRequestBuilder( - querySizeLimit, - createExprValueFactory()); + var builder = new OpenSearchRequestBuilder(querySizeLimit, createExprValueFactory()); Function createScanOperator = - requestBuilder -> new OpenSearchIndexScan(client, requestBuilder.getMaxResponseSize(), - requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); + requestBuilder -> + new OpenSearchIndexScan( + client, + requestBuilder.getMaxResponseSize(), + requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); return new OpenSearchIndexScanBuilder(builder, createScanOperator); } @@ -193,27 +182,27 @@ private OpenSearchExprValueFactory createExprValueFactory() { @VisibleForTesting @RequiredArgsConstructor - public static class OpenSearchDefaultImplementor - extends DefaultImplementor { + public static class OpenSearchDefaultImplementor extends DefaultImplementor { private final OpenSearchClient client; @Override public PhysicalPlan visitMLCommons(LogicalMLCommons node, OpenSearchIndexScan context) { - return new MLCommonsOperator(visitChild(node, context), node.getAlgorithm(), - node.getArguments(), client.getNodeClient()); + return new MLCommonsOperator( + visitChild(node, context), + node.getAlgorithm(), + node.getArguments(), + client.getNodeClient()); } @Override public PhysicalPlan visitAD(LogicalAD node, OpenSearchIndexScan context) { - return new ADOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new ADOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } @Override public PhysicalPlan visitML(LogicalML node, OpenSearchIndexScan context) { - return new MLOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new MLOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java index c915fa549b..7c022e2190 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.opensearch.sql.utils.SystemIndexUtils.isSystemIndex; @@ -22,10 +21,9 @@ public class OpenSearchStorageEngine implements StorageEngine { /** OpenSearch client connection. 
*/ - @Getter - private final OpenSearchClient client; - @Getter - private final Settings settings; + @Getter private final OpenSearchClient client; + + @Getter private final Settings settings; @Override public Table getTable(DataSourceSchemaName dataSourceSchemaName, String name) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java index 0ca9cde3d2..b2e9319bb1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import java.io.IOException; @@ -26,9 +25,7 @@ import org.opensearch.sql.planner.SerializablePlan; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchIndexScan extends TableScanOperator implements SerializablePlan { @@ -37,14 +34,10 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab private OpenSearchClient client; /** Search request. */ - @EqualsAndHashCode.Include - @ToString.Include - private OpenSearchRequest request; + @EqualsAndHashCode.Include @ToString.Include private OpenSearchRequest request; /** Largest number of rows allowed in the response. */ - @EqualsAndHashCode.Include - @ToString.Include - private int maxResponseSize; + @EqualsAndHashCode.Include @ToString.Include private int maxResponseSize; /** Number of rows returned. */ private Integer queryCount; @@ -52,12 +45,9 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab /** Search response for current batch. */ private Iterator iterator; - /** - * Creates index scan based on a provided OpenSearchRequestBuilder. - */ - public OpenSearchIndexScan(OpenSearchClient client, - int maxResponseSize, - OpenSearchRequest request) { + /** Creates index scan based on a provided OpenSearchRequestBuilder. */ + public OpenSearchIndexScan( + OpenSearchClient client, int maxResponseSize, OpenSearchRequest request) { this.client = client; this.maxResponseSize = maxResponseSize; this.request = request; @@ -106,12 +96,13 @@ public String explain() { return request.toString(); } - /** No-args constructor. + /** + * No-args constructor. + * * @deprecated Exists only to satisfy Java serialization API. 
*/ @Deprecated(since = "introduction") - public OpenSearchIndexScan() { - } + public OpenSearchIndexScan() {} @Override public void readExternal(ObjectInput in) throws IOException { @@ -119,8 +110,9 @@ public void readExternal(ObjectInput in) throws IOException { byte[] requestStream = new byte[reqSize]; in.read(requestStream); - var engine = (OpenSearchStorageEngine) ((PlanSerializer.CursorDeserializationStream) in) - .resolveObject("engine"); + var engine = + (OpenSearchStorageEngine) + ((PlanSerializer.CursorDeserializationStream) in).resolveObject("engine"); try (BytesStreamInput bsi = new BytesStreamInput(requestStream)) { request = new OpenSearchScrollRequest(bsi, engine); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java index d5f89d2579..02ac21a39d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java @@ -24,9 +24,7 @@ import org.opensearch.sql.planner.logical.LogicalFilter; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. - */ +/** Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. */ @EqualsAndHashCode class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { @@ -42,9 +40,8 @@ class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { /** Sorting items pushed down. */ private List> sortList; - - OpenSearchIndexScanAggregationBuilder(OpenSearchRequestBuilder requestBuilder, - LogicalAggregation aggregation) { + OpenSearchIndexScanAggregationBuilder( + OpenSearchRequestBuilder requestBuilder, LogicalAggregation aggregation) { this.requestBuilder = requestBuilder; aggregatorList = aggregation.getAggregatorList(); groupByList = aggregation.getGroupByList(); @@ -57,8 +54,7 @@ public OpenSearchRequestBuilder build() { Pair, OpenSearchAggregationResponseParser> aggregationBuilder = builder.buildAggregationBuilder(aggregatorList, groupByList, sortList); requestBuilder.pushDownAggregation(aggregationBuilder); - requestBuilder.pushTypeMapping( - builder.buildTypeMapping(aggregatorList, groupByList)); + requestBuilder.pushTypeMapping(builder.buildTypeMapping(aggregatorList, groupByList)); return requestBuilder; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java index edcbedc7a7..8a2f3e98f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java @@ -30,29 +30,24 @@ public class OpenSearchIndexScanBuilder extends TableScanBuilder { private final Function scanFactory; - /** - * Delegated index scan builder for non-aggregate or aggregate query. - */ - @EqualsAndHashCode.Include - private PushDownQueryBuilder delegate; + + /** Delegated index scan builder for non-aggregate or aggregate query. */ + @EqualsAndHashCode.Include private PushDownQueryBuilder delegate; /** Is limit operator pushed down. 
*/ private boolean isLimitPushedDown = false; - /** - * Constructor used during query execution. - */ - public OpenSearchIndexScanBuilder(OpenSearchRequestBuilder requestBuilder, + /** Constructor used during query execution. */ + public OpenSearchIndexScanBuilder( + OpenSearchRequestBuilder requestBuilder, Function scanFactory) { this.delegate = new OpenSearchIndexScanQueryBuilder(requestBuilder); this.scanFactory = scanFactory; - } - /** - * Constructor used for unit tests. - */ - protected OpenSearchIndexScanBuilder(PushDownQueryBuilder translator, + /** Constructor used for unit tests. */ + protected OpenSearchIndexScanBuilder( + PushDownQueryBuilder translator, Function scanFactory) { this.delegate = translator; this.scanFactory = scanFactory; @@ -117,13 +112,16 @@ public boolean pushDownNested(LogicalNested nested) { /** * Valid if sorting is only by fields. + * * @param sort Logical sort * @return True if sorting by fields only */ private boolean sortByFieldsOnly(LogicalSort sort) { return sort.getSortList().stream() - .map(sortItem -> sortItem.getRight() instanceof ReferenceExpression - || isNestedFunction(sortItem.getRight())) + .map( + sortItem -> + sortItem.getRight() instanceof ReferenceExpression + || isNestedFunction(sortItem.getRight())) .reduce(true, Boolean::logicalAnd); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java index 590272a9f1..f4b0b05256 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java @@ -35,8 +35,8 @@ import org.opensearch.sql.planner.logical.LogicalSort; /** - * Index scan builder for simple non-aggregate query used by - * {@link OpenSearchIndexScanBuilder} internally. + * Index scan builder for simple non-aggregate query used by {@link OpenSearchIndexScanBuilder} + * internally. 
*/ @VisibleForTesting @EqualsAndHashCode @@ -50,13 +50,11 @@ public OpenSearchIndexScanQueryBuilder(OpenSearchRequestBuilder requestBuilder) @Override public boolean pushDownFilter(LogicalFilter filter) { - FilterQueryBuilder queryBuilder = new FilterQueryBuilder( - new DefaultExpressionSerializer()); + FilterQueryBuilder queryBuilder = new FilterQueryBuilder(new DefaultExpressionSerializer()); Expression queryCondition = filter.getCondition(); QueryBuilder query = queryBuilder.build(queryCondition); requestBuilder.pushDownFilter(query); - requestBuilder.pushDownTrackedScore( - trackScoresFromOpenSearchFunction(queryCondition)); + requestBuilder.pushDownTrackedScore(trackScoresFromOpenSearchFunction(queryCondition)); return true; } @@ -64,9 +62,10 @@ public boolean pushDownFilter(LogicalFilter filter) { public boolean pushDownSort(LogicalSort sort) { List> sortList = sort.getSortList(); final SortQueryBuilder builder = new SortQueryBuilder(); - requestBuilder.pushDownSort(sortList.stream() - .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) - .collect(Collectors.toList())); + requestBuilder.pushDownSort( + sortList.stream() + .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) + .collect(Collectors.toList())); return true; } @@ -78,8 +77,7 @@ public boolean pushDownLimit(LogicalLimit limit) { @Override public boolean pushDownProject(LogicalProject project) { - requestBuilder.pushDownProjects( - findReferenceExpressions(project.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(project.getProjectList())); // Return false intentionally to keep the original project operator return false; @@ -105,8 +103,8 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { return true; } if (condition instanceof FunctionExpression) { - return ((FunctionExpression) condition).getArguments().stream() - .anyMatch(this::trackScoresFromOpenSearchFunction); + return ((FunctionExpression) condition) + .getArguments().stream().anyMatch(this::trackScoresFromOpenSearchFunction); } return false; } @@ -114,8 +112,7 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { @Override public boolean pushDownNested(LogicalNested nested) { requestBuilder.pushDownNested(nested.getFields()); - requestBuilder.pushDownProjects( - findReferenceExpressions(nested.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(nested.getProjectList())); // Return false intentionally to keep the original nested operator // Since we return false we need to pushDownProject here as it won't be // pushed down due to no matching push down rule. @@ -130,8 +127,8 @@ public OpenSearchRequestBuilder build() { /** * Find reference expression from expression. - * @param expressions a list of expression. * + * @param expressions a list of expression. * @return a set of ReferenceExpression */ public static Set findReferenceExpressions( @@ -145,18 +142,20 @@ public static Set findReferenceExpressions( /** * Find reference expression from expression. - * @param expression expression. * + * @param expression expression. 
* @return a list of ReferenceExpression */ public static List findReferenceExpression(NamedExpression expression) { List results = new ArrayList<>(); - expression.accept(new ExpressionNodeVisitor<>() { - @Override - public Object visitReference(ReferenceExpression node, Object context) { - return results.add(node); - } - }, null); + expression.accept( + new ExpressionNodeVisitor<>() { + @Override + public Object visitReference(ReferenceExpression node, Object context) { + return results.add(node); + } + }, + null); return results; } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java index 274bc4647d..b855b9a8b5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java @@ -14,9 +14,7 @@ import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Translates a logical query plan into OpenSearch DSL and an appropriate request. - */ +/** Translates a logical query plan into OpenSearch DSL and an appropriate request. */ public interface PushDownQueryBuilder { default boolean pushDownFilter(LogicalFilter filter) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java index 7b68bd5c92..a485296b52 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import lombok.experimental.UtilityClass; @@ -12,6 +11,7 @@ public class StringUtils { /** * Converts sql wildcard character % and _ to * and ?. + * * @param text string to be converted * @return converted string */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index a1b633f942..753c2bbbc7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -40,10 +40,12 @@ public abstract class LuceneQuery { /** * Check if function expression supported by current Lucene query. Default behavior is that report * supported if: + * *
- *  <li>Left is a reference
- *  <li>Right side is a literal
+ * <ol>
+ *   <li>Left is a reference
+ *   <li>Right side is a literal
+ * </ol>
+ * * @param func function * @return return true if supported, otherwise false. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java index 7e13cad592..2e33e3cc7c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import lombok.RequiredArgsConstructor; @@ -14,19 +13,19 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Lucene query that builds range query for non-quality comparison. - */ +/** Lucene query that builds range query for non-quality comparison. */ @RequiredArgsConstructor public class RangeQuery extends LuceneQuery { public enum Comparison { - LT, GT, LTE, GTE, BETWEEN + LT, + GT, + LTE, + GTE, + BETWEEN } - /** - * Comparison that range query build for. - */ + /** Comparison that range query build for. */ private final Comparison comparison; @Override @@ -55,5 +54,4 @@ private Object value(ExprValue literal) { return literal.value(); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java index c98de1cd84..cd506898d7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import org.opensearch.index.query.QueryBuilder; @@ -13,9 +12,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Lucene query that build term query for equality comparison. - */ +/** Lucene query that build term query for equality comparison. */ public class TermQuery extends LuceneQuery { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java index 35d5a43a41..0346b7712e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java @@ -8,16 +8,14 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the 'query' query. - */ +/** Class for Lucene query that builds the 'query' query. */ public class QueryQuery extends NoFieldQuery { private final String queryQueryName = "query"; /** - * Default constructor for QueryQuery configures how RelevanceQuery.build() handles - * named arguments by calling the constructor of QueryStringQuery. + * Default constructor for QueryQuery configures how RelevanceQuery.build() handles named + * arguments by calling the constructor of QueryStringQuery. 
*/ public QueryQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java index 43131baa3e..410c55cea6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java @@ -9,13 +9,11 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the query_string query. - */ +/** Class for Lucene query that builds the query_string query. */ public class QueryStringQuery extends MultiFieldQuery { /** - * Default constructor for QueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for QueryString configures how RelevanceQuery.build() handles named + * arguments. */ public QueryStringQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); @@ -29,8 +27,8 @@ public QueryStringQuery() { * @return : Builder for query_string query */ @Override - protected QueryStringQueryBuilder createBuilder(ImmutableMap fields, - String query) { + protected QueryStringQueryBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.queryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java index b8641a5c0b..87faf320ec 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java @@ -20,37 +20,39 @@ import org.opensearch.sql.expression.NamedArgumentExpression; import org.opensearch.sql.opensearch.storage.script.filter.lucene.LuceneQuery; -/** - * Base class for query abstraction that builds a relevance query from function expression. - */ +/** Base class for query abstraction that builds a relevance query from function expression. 
*/ @RequiredArgsConstructor public abstract class RelevanceQuery extends LuceneQuery { - @Getter - private final Map> queryBuildActions; + @Getter private final Map> queryBuildActions; protected void ignoreArguments(List arguments) { - arguments.removeIf(a -> a.getArgName().equalsIgnoreCase("field") - || a.getArgName().equalsIgnoreCase("fields") - || a.getArgName().equalsIgnoreCase("query")); + arguments.removeIf( + a -> + a.getArgName().equalsIgnoreCase("field") + || a.getArgName().equalsIgnoreCase("fields") + || a.getArgName().equalsIgnoreCase("query")); } protected void checkValidArguments(String argNormalized, T queryBuilder) { if (!queryBuildActions.containsKey(argNormalized)) { throw new SemanticCheckException( - String.format("Parameter %s is invalid for %s function.", - argNormalized, queryBuilder.getWriteableName())); + String.format( + "Parameter %s is invalid for %s function.", + argNormalized, queryBuilder.getWriteableName())); } } protected T loadArguments(List arguments) throws SemanticCheckException { // Aggregate parameters by name, so getting a Map - arguments.stream().collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) - .forEach((k, v) -> { - if (v.size() > 1) { - throw new SemanticCheckException( - String.format("Parameter '%s' can only be specified once.", k)); - } - }); + arguments.stream() + .collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) + .forEach( + (k, v) -> { + if (v.size() > 1) { + throw new SemanticCheckException( + String.format("Parameter '%s' can only be specified once.", k)); + } + }); T queryBuilder = createQueryBuilder(arguments); @@ -63,9 +65,7 @@ protected T loadArguments(List arguments) throws Semant checkValidArguments(argNormalized, queryBuilder); - (Objects.requireNonNull( - queryBuildActions - .get(argNormalized))) + (Objects.requireNonNull(queryBuildActions.get(argNormalized))) .apply(queryBuilder, arg.getValue().valueOf()); } @@ -74,15 +74,16 @@ protected T loadArguments(List arguments) throws Semant @Override public QueryBuilder build(FunctionExpression func) { - var arguments = func.getArguments().stream() - .map(a -> (NamedArgumentExpression)a).collect(Collectors.toList()); + var arguments = + func.getArguments().stream() + .map(a -> (NamedArgumentExpression) a) + .collect(Collectors.toList()); if (arguments.size() < 2) { throw new SyntaxCheckException( String.format("%s requires at least two parameters", getQueryName())); } return loadArguments(arguments); - } protected abstract T createQueryBuilder(List arguments); @@ -90,12 +91,10 @@ public QueryBuilder build(FunctionExpression func) { protected abstract String getQueryName(); /** - * Convenience interface for a function that updates a QueryBuilder - * based on ExprValue. + * Convenience interface for a function that updates a QueryBuilder based on ExprValue. 
* * @param Concrete query builder */ - protected interface QueryBuilderStep extends - BiFunction { - } + protected interface QueryBuilderStep + extends BiFunction {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java index 157921572a..86dd44c118 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java @@ -11,16 +11,16 @@ public class SimpleQueryStringQuery extends MultiFieldQuery { /** - * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles named + * arguments. */ public SimpleQueryStringQuery() { super(FunctionParameterRepository.SimpleQueryStringQueryBuildActions); } @Override - protected SimpleQueryStringBuilder createBuilder(ImmutableMap fields, - String query) { + protected SimpleQueryStringBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.simpleQueryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java index ec110dfd8b..086aaddc5e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java @@ -26,18 +26,20 @@ public SingleFieldQuery(Map> queryBuildActions) { @Override protected T createQueryBuilder(List arguments) { // Extract 'field' and 'query' - var field = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("field")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); + var field = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("field")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); - var query = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("query")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); return createBuilder( - ((ReferenceExpression)field.getValue()).getAttr(), + ((ReferenceExpression) field.getValue()).getAttr(), query.getValue().valueOf().stringValue()); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java index 9fd37e3de7..7b9887e516 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java @@ -3,20 +3,17 @@ * 
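// Illustrative sketch, not from this PR: the shape of the argument-name -> builder-action
// map that RelevanceQuery subclasses hand to super(...). QueryBuilderStep is the protected
// BiFunction alias shown above, so this assumes the snippet lives in the same
// ...storage.script.filter.lucene.relevance package. The "analyzer" and "boost" entries are
// hypothetical examples; the real maps are kept in FunctionParameterRepository.
ImmutableMap<String, RelevanceQuery.QueryBuilderStep<QueryStringQueryBuilder>> exampleActions =
    ImmutableMap.<String, RelevanceQuery.QueryBuilderStep<QueryStringQueryBuilder>>builder()
        .put("analyzer", (builder, value) -> builder.analyzer(value.stringValue()))
        .put("boost", (builder, value) -> builder.boost(Float.parseFloat(value.stringValue())))
        .build();
// A concrete query class then calls super(exampleActions) in its constructor and implements
// createBuilder(...) and getQueryName(), in the same way QueryStringQuery does above.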
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.sql.opensearch.storage.script.StringUtils; -/** - * Lucene query that builds wildcard query. - */ +/** Lucene query that builds wildcard query. */ public class WildcardQuery extends SingleFieldQuery { /** - * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles named + * arguments. */ public WildcardQuery() { super(FunctionParameterRepository.WildcardQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java index 9002df7c8f..7669b569d4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.opensearch.sql.analysis.NestedAnalyzer.generatePath; @@ -22,23 +21,17 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Builder of {@link SortBuilder}. - */ +/** Builder of {@link SortBuilder}. */ public class SortQueryBuilder { - /** - * The mapping between Core Engine sort order and OpenSearch sort order. - */ + /** The mapping between Core Engine sort order and OpenSearch sort order. */ private Map sortOrderMap = new ImmutableMap.Builder() .put(Sort.SortOrder.ASC, SortOrder.ASC) .put(Sort.SortOrder.DESC, SortOrder.DESC) .build(); - /** - * The mapping between Core Engine null order and OpenSearch null order. - */ + /** The mapping between Core Engine null order and OpenSearch null order. */ private Map missingMap = new ImmutableMap.Builder() .put(Sort.NullOrder.NULL_FIRST, "_first") @@ -61,14 +54,15 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { } else if (isNestedFunction(expression)) { validateNestedArgs((FunctionExpression) expression); - String orderByName = ((FunctionExpression)expression).getArguments().get(0).toString(); + String orderByName = ((FunctionExpression) expression).getArguments().get(0).toString(); // Generate path if argument not supplied in function. - ReferenceExpression path = ((FunctionExpression)expression).getArguments().size() == 2 - ? (ReferenceExpression) ((FunctionExpression)expression).getArguments().get(1) - : generatePath(orderByName); + ReferenceExpression path = + ((FunctionExpression) expression).getArguments().size() == 2 + ? 
(ReferenceExpression) ((FunctionExpression) expression).getArguments().get(1) + : generatePath(orderByName); return SortBuilders.fieldSort(orderByName) - .order(sortOrderMap.get(option.getSortOrder())) - .setNestedSort(new NestedSortBuilder(path.toString())); + .order(sortOrderMap.get(option.getSortOrder())) + .setNestedSort(new NestedSortBuilder(path.toString())); } else { throw new IllegalStateException("unsupported expression " + expression.getClass()); } @@ -76,29 +70,26 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { /** * Validate semantics for arguments in nested function. + * * @param nestedFunc Nested function expression. */ private void validateNestedArgs(FunctionExpression nestedFunc) { if (nestedFunc.getArguments().size() < 1 || nestedFunc.getArguments().size() > 2) { throw new IllegalArgumentException( - "nested function supports 2 parameters (field, path) or 1 parameter (field)" - ); + "nested function supports 2 parameters (field, path) or 1 parameter (field)"); } for (Expression arg : nestedFunc.getArguments()) { if (!(arg instanceof ReferenceExpression)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", - arg.toString() - ) - ); + String.format("Illegal nested field name: %s", arg.toString())); } } } private FieldSortBuilder fieldBuild(ReferenceExpression ref, Sort.SortOption option) { return SortBuilders.fieldSort( - OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) + OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) .order(sortOrderMap.get(option.getSortOrder())) .missing(missingMap.get(option.getNullOrder())); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java index 7b6efeeba4..b1b2081f94 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; @@ -24,17 +23,12 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * OpenSearch System Index Table Implementation. - */ +/** OpenSearch System Index Table Implementation. */ public class OpenSearchSystemIndex implements Table { - /** - * System Index Name. - */ + /** System Index Name. 
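// Illustrative sketch, not from this PR: driving the SortQueryBuilder shown above from the
// core engine. DSL.ref and Sort.SortOption.DEFAULT_DESC are assumed to come from the core
// module as in other callers; the "age" field name is a made-up example.
import static org.opensearch.sql.data.type.ExprCoreType.INTEGER;

import org.opensearch.search.sort.SortBuilder;
import org.opensearch.sql.ast.tree.Sort.SortOption;
import org.opensearch.sql.expression.DSL;
import org.opensearch.sql.opensearch.storage.script.sort.SortQueryBuilder;

class SortQueryBuilderUsageSketch {
  SortBuilder<?> descByAge() {
    // DESC resolves through sortOrderMap to SortOrder.DESC; the option's null order resolves
    // through missingMap to "_first"/"_last" on the resulting FieldSortBuilder.
    return new SortQueryBuilder().build(DSL.ref("age", INTEGER), SortOption.DEFAULT_DESC);
  }
}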
*/ private final Pair systemIndexBundle; - public OpenSearchSystemIndex( - OpenSearchClient client, String indexName) { + public OpenSearchSystemIndex(OpenSearchClient client, String indexName) { this.systemIndexBundle = buildIndexBundle(client, indexName); } @@ -61,8 +55,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class OpenSearchSystemIndexDefaultImplementor - extends DefaultImplementor { + public class OpenSearchSystemIndexDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -79,10 +72,11 @@ private Pair buildIndexBun OpenSearchClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, - new OpenSearchCatIndicesRequest(client)); + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, new OpenSearchCatIndicesRequest(client)); } else { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, new OpenSearchDescribeIndexRequest(client, systemTable.getTableName())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java index ee377263c1..57cdd52985 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import java.util.Iterator; @@ -14,21 +13,15 @@ import org.opensearch.sql.opensearch.request.system.OpenSearchSystemRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchSystemIndexScan extends TableScanOperator { - /** - * OpenSearch request. - */ + /** OpenSearch request. */ private final OpenSearchSystemRequest request; - /** - * Search response for current batch. - */ + /** Search response for current batch. */ private Iterator iterator; @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java index aa09ff4660..781431ea67 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -15,53 +14,52 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** - * Definition of the system table schema. - */ +/** Definition of the system table schema. 
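// Illustrative sketch, not from this PR: each constant of the schema enum defined just below
// exposes its column -> type map through Lombok's @Getter, which is how a system table scan
// reports its schema. The printing loop is illustrative only.
import java.util.Map;
import org.opensearch.sql.data.type.ExprType;
import org.opensearch.sql.opensearch.storage.system.OpenSearchSystemIndexSchema;

class SystemIndexSchemaSketch {
  void printTablesSchema() {
    Map<String, ExprType> columns = OpenSearchSystemIndexSchema.SYS_TABLE_TABLES.getMapping();
    columns.forEach((name, type) -> System.out.println(name + " : " + type.typeName()));
  }
}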
*/ @Getter @RequiredArgsConstructor public enum OpenSearchSystemIndexSchema { - - SYS_TABLE_TABLES(new LinkedHashMap() {{ - put("TABLE_CAT", STRING); - put("TABLE_SCHEM", STRING); - put("TABLE_NAME", STRING); - put("TABLE_TYPE", STRING); - put("REMARKS", STRING); - put("TYPE_CAT", STRING); - put("TYPE_SCHEM", STRING); - put("TYPE_NAME", STRING); - put("SELF_REFERENCING_COL_NAME", STRING); - put("REF_GENERATION", STRING); - } - } - ), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CAT", STRING) - .put("TABLE_SCHEM", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .put("TYPE_NAME", STRING) - .put("COLUMN_SIZE", STRING) - .put("BUFFER_LENGTH", STRING) - .put("DECIMAL_DIGITS", STRING) - .put("NUM_PREC_RADIX", STRING) - .put("NULLABLE", STRING) - .put("REMARKS", STRING) - .put("COLUMN_DEF", STRING) - .put("SQL_DATA_TYPE", STRING) - .put("SQL_DATETIME_SUB", STRING) - .put("CHAR_OCTET_LENGTH", STRING) - .put("ORDINAL_POSITION", STRING) - .put("IS_NULLABLE", STRING) - .put("SCOPE_CATALOG", STRING) - .put("SCOPE_SCHEMA", STRING) - .put("SCOPE_TABLE", STRING) - .put("SOURCE_DATA_TYPE", STRING) - .put("IS_AUTOINCREMENT", STRING) - .put("IS_GENERATEDCOLUMN", STRING) - .build()); + SYS_TABLE_TABLES( + new LinkedHashMap() { + { + put("TABLE_CAT", STRING); + put("TABLE_SCHEM", STRING); + put("TABLE_NAME", STRING); + put("TABLE_TYPE", STRING); + put("REMARKS", STRING); + put("TYPE_CAT", STRING); + put("TYPE_SCHEM", STRING); + put("TYPE_NAME", STRING); + put("SELF_REFERENCING_COL_NAME", STRING); + put("REF_GENERATION", STRING); + } + }), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CAT", STRING) + .put("TABLE_SCHEM", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .put("TYPE_NAME", STRING) + .put("COLUMN_SIZE", STRING) + .put("BUFFER_LENGTH", STRING) + .put("DECIMAL_DIGITS", STRING) + .put("NUM_PREC_RADIX", STRING) + .put("NULLABLE", STRING) + .put("REMARKS", STRING) + .put("COLUMN_DEF", STRING) + .put("SQL_DATA_TYPE", STRING) + .put("SQL_DATETIME_SUB", STRING) + .put("CHAR_OCTET_LENGTH", STRING) + .put("ORDINAL_POSITION", STRING) + .put("IS_NULLABLE", STRING) + .put("SCOPE_CATALOG", STRING) + .put("SCOPE_SCHEMA", STRING) + .put("SCOPE_TABLE", STRING) + .put("SOURCE_DATA_TYPE", STRING) + .put("IS_AUTOINCREMENT", STRING) + .put("IS_GENERATEDCOLUMN", STRING) + .build()); private final Map mapping; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index d985bcbeec..040b7d2759 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -89,17 +88,14 @@ class OpenSearchNodeClientTest { @Mock(answer = RETURNS_DEEP_STUBS) private NodeClient nodeClient; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse indexResponse; + @Mock private GetIndexResponse indexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new 
ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); private OpenSearchClient client; @@ -110,8 +106,7 @@ void setUp() { @Test void is_index_exist() { - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(true)); assertTrue(client.exists("test")); @@ -120,8 +115,7 @@ void is_index_exist() { @Test void is_index_not_exist() { String indexName = "test"; - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(false)); assertFalse(client.exists(indexName)); @@ -137,11 +131,8 @@ void is_index_exist_with_exception() { @Test void create_index() { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(nodeClient.admin().indices() - .create(any(CreateIndexRequest.class)).actionGet()) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(nodeClient.admin().indices().create(any(CreateIndexRequest.class)).actionGet()) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -151,8 +142,7 @@ void create_index() { void create_index_with_exception() { when(nodeClient.admin().indices().create(any())).thenThrow(RuntimeException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -172,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown type isn't parsed and ignored () -> 
assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -247,11 +236,8 @@ void get_index_mappings_with_IOException() { @Test void get_index_mappings_with_non_exist_index() { - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get() - ).thenThrow(IndexNotFoundException.class); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenThrow(IndexNotFoundException.class); assertThrows(IndexNotFoundException.class, () 
-> client.getIndexMappings("non_exist_index")); } @@ -307,9 +293,7 @@ void search() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -320,9 +304,13 @@ void search() { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -355,9 +343,13 @@ void cleanup() { when(requestBuilder.addScrollId(any())).thenReturn(requestBuilder); when(requestBuilder.get()).thenReturn(null); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. FieldUtils.writeField(request, "needClean", true, true); @@ -372,9 +364,13 @@ void cleanup() { @Test void cleanup_without_scrollId() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } @@ -384,9 +380,13 @@ void cleanup_without_scrollId() { void cleanup_rethrows_exception() { when(nodeClient.prepareClearScroll()).thenThrow(new RuntimeException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -398,10 +398,8 @@ void get_indices() { AliasMetadata aliasMetadata = mock(AliasMetadata.class); final var openMap = Map.of("index", List.of(aliasMetadata)); when(aliasMetadata.alias()).thenReturn("index_alias"); - when(nodeClient.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get()).thenReturn(indexResponse); + when(nodeClient.admin().indices().prepareGetIndex().setLocal(true).get()) + .thenReturn(indexResponse); when(indexResponse.getIndices()).thenReturn(new String[] {"index"}); when(indexResponse.aliases()).thenReturn(openMap); @@ -427,10 +425,8 @@ void ml() { public void mockNodeClientIndicesMappings(String indexName, String mappings) { GetMappingsResponse mockResponse = mock(GetMappingsResponse.class); MappingMetadata emptyMapping = mock(MappingMetadata.class); - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get()).thenReturn(mockResponse); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenReturn(mockResponse); try { Map metadata; if (mappings.isEmpty()) { @@ -445,13 +441,12 @@ public void mockNodeClientIndicesMappings(String indexName, String mappings) { } } - private void mockNodeClientSettings(String indexName, String indexMetadata) - throws IOException { + private void mockNodeClientSettings(String indexName, String indexMetadata) throws IOException { GetSettingsResponse mockResponse = mock(GetSettingsResponse.class); when(nodeClient.admin().indices().prepareGetSettings(any()).setLocal(anyBoolean()).get()) .thenReturn(mockResponse); - Map metadata = Map.of(indexName, - IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); + Map metadata = + Map.of(indexName, IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); when(mockResponse.getIndexToSettings()).thenReturn(metadata); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java index 409596910e..99201aae4f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -79,22 +78,20 @@ class OpenSearchRestClientTest { private static final String TEST_MAPPING_FILE = "mappings/accounts.json"; + @Mock(answer = RETURNS_DEEP_STUBS) private RestHighLevelClient restClient; private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse getIndexResponse; + @Mock private GetIndexResponse getIndexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); @BeforeEach void setUp() { @@ -103,8 +100,9 @@ void setUp() { @Test void is_index_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + .exists(any(), any())) 
// use any() because missing equals() in GetIndexRequest .thenReturn(true); assertTrue(client.exists("test")); @@ -112,8 +110,9 @@ void is_index_exist() throws IOException { @Test void is_index_not_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + .exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(false); assertFalse(client.exists("test")); @@ -129,11 +128,8 @@ void is_index_exist_with_exception() throws IOException { @Test void create_index() throws IOException { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(restClient.indices() - .create(any(), any())) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(restClient.indices().create(any(), any())) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -142,8 +138,7 @@ void create_index() throws IOException { @Test void create_index_with_IOException() throws IOException { when(restClient.indices().create(any(), any())).thenThrow(IOException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -167,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with 
spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -234,14 +228,11 @@ void get_index_max_result_windows_settings() throws IOException { Integer maxResultWindow = 1000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, maxResultWindowSettings); - Map indexToDefaultSettings = - mockSettings(indexName, emptySettings); + Map indexToSettings = mockSettings(indexName, maxResultWindowSettings); + Map indexToDefaultSettings = mockSettings(indexName, emptySettings); 
when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -258,14 +249,11 @@ void get_index_max_result_windows_default_settings() throws IOException { Integer maxResultWindow = 10000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, emptySettings); - Map indexToDefaultSettings = - mockSettings(indexName, maxResultWindowSettings); + Map indexToSettings = mockSettings(indexName, emptySettings); + Map indexToDefaultSettings = mockSettings(indexName, maxResultWindowSettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -292,9 +280,7 @@ void search() throws IOException { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -305,9 +291,13 @@ void search() throws IOException { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -327,9 +317,14 @@ void search_with_IOException() throws IOException { when(restClient.search(any(), any())).thenThrow(new IOException()); assertThrows( IllegalStateException.class, - () -> client.search(new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()))); + () -> + client.search( + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()))); } @Test @@ -349,28 +344,34 @@ void scroll_with_IOException() throws IOException { when(restClient.scroll(any(), any())).thenThrow(new IOException()); // First request run successfully - OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest scrollRequest = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.search(scrollRequest); - assertThrows( - IllegalStateException.class, () -> 
client.search(scrollRequest)); + assertThrows(IllegalStateException.class, () -> client.search(scrollRequest)); } @Test void schedule() { AtomicBoolean isRun = new AtomicBoolean(false); - client.schedule( - () -> isRun.set(true)); + client.schedule(() -> isRun.set(true)); assertTrue(isRun.get()); } @Test @SneakyThrows void cleanup() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); @@ -381,9 +382,13 @@ void cleanup() { @Test void cleanup_without_scrollId() throws IOException { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @@ -393,9 +398,13 @@ void cleanup_without_scrollId() throws IOException { void cleanup_with_IOException() { when(restClient.clearScroll(any(), any())).thenThrow(new IOException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java index 4edb25aff5..defa97d8c8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java index cda4377c60..38a4ad3199 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java index b60402e746..9b7e032c57 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -27,54 +26,73 @@ public void type_of_ExprTextValue() { @Test public void getFields() { - var fields = Map.of( - "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), - "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), - "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); + var fields = + Map.of( + "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); assertEquals(fields, OpenSearchTextType.of(fields).getFields()); } @Test void non_text_types_arent_converted() { assertAll( - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(INTEGER))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(STRING))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(INTEGER))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", 
OpenSearchDataType.of(STRING))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", STRING)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER))); } @Test void non_text_types_with_nested_objects_arent_converted() { - var objectType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Object, - Map.of("subfield", OpenSearchDataType.of(STRING))); - var arrayType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested, - Map.of("subfield", OpenSearchDataType.of(STRING))); + var objectType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Object, + Map.of("subfield", OpenSearchDataType.of(STRING))); + var arrayType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Nested, + Map.of("subfield", OpenSearchDataType.of(STRING))); assertAll( () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", objectType)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType))); } @Test void text_type_without_fields_isnt_converted() { - assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); } @Test void text_type_with_fields_is_converted() { - var textWithKeywordType = OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); - assertEquals("field.keyword", - OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); + var textWithKeywordType = + OpenSearchTextType.of( + Map.of("keyword", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + assertEquals( + "field.keyword", OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index 827606a961..3d3a6a5996 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -88,8 +87,8 @@ class OpenSearchExprValueFactoryTest { .put("timeNoMillisOrTimeV", OpenSearchDateType.of("time_no_millis || time")) .put("dateOrOrdinalDateV", OpenSearchDateType.of("date || ordinal_date")) .put("customFormatV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss")) - .put("customAndEpochMillisV", - OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || 
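// Illustrative sketch, not from this PR: the behaviour pinned down by
// OpenSearchExprTextValueTest above. A text field is rewritten to its keyword sub-field only
// when the mapping actually declares one; "title" is a made-up field name.
var textWithKeyword =
    OpenSearchTextType.of(
        Map.of("keyword", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)));
// text with a keyword sub-field -> use the sub-field for term-level operations
assertEquals("title.keyword", OpenSearchTextType.convertTextToKeyword("title", textWithKeyword));
// plain text without sub-fields -> the field name is left untouched
assertEquals(
    "title",
    OpenSearchTextType.convertTextToKeyword(
        "title", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text)));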
epoch_millis")) + .put( + "customAndEpochMillisV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) .put("incompleteFormatV", OpenSearchDateType.of("year")) .put("boolV", OpenSearchDataType.of(BOOLEAN)) .put("structV", OpenSearchDataType.of(STRUCT)) @@ -98,20 +97,22 @@ class OpenSearchExprValueFactoryTest { .put("arrayV", OpenSearchDataType.of(ARRAY)) .put("arrayV.info", OpenSearchDataType.of(STRING)) .put("arrayV.author", OpenSearchDataType.of(STRING)) - .put("deepNestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) - .put("deepNestedV.year", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "deepNestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) + .put( + "deepNestedV.year", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("deepNestedV.year.timeV", OpenSearchDateType.of(TIME)) - .put("nestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "nestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("nestedV.count", OpenSearchDataType.of(INTEGER)) .put("textV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text)) - .put("textKeywordV", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) + .put( + "textKeywordV", + OpenSearchTextType.of( + Map.of("words", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) .put("ipV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip)) .put("geoV", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint)) .put("binaryV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary)) @@ -124,9 +125,8 @@ class OpenSearchExprValueFactoryTest { public void constructNullValue() { assertAll( () -> assertEquals(nullValue(), tupleValue("{\"intV\":null}").get("intV")), - () -> assertEquals(nullValue(), constructFromObject("intV", null)), - () -> assertTrue(new OpenSearchJsonContent(null).isNull()) - ); + () -> assertEquals(nullValue(), constructFromObject("intV", null)), + () -> assertTrue(new OpenSearchJsonContent(null).isNull())); } @Test @@ -136,8 +136,7 @@ public void iterateArrayValue() throws JsonProcessingException { assertAll( () -> assertEquals("zz", arrayIt.next().stringValue()), () -> assertEquals("bb", arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -146,8 +145,7 @@ public void iterateArrayValueWithOneElement() throws JsonProcessingException { var arrayIt = new OpenSearchJsonContent(mapper.readTree("[\"zz\"]")).array(); assertAll( () -> assertEquals("zz", arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -160,8 +158,7 @@ public void constructByte() { assertAll( () -> assertEquals(byteValue((byte) 1), tupleValue("{\"byteV\":1}").get("byteV")), () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", 1)), - () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0")) - ); + () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0"))); } @Test @@ -169,8 +166,7 @@ public void constructShort() { assertAll( () -> assertEquals(shortValue((short) 1), tupleValue("{\"shortV\":1}").get("shortV")), () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", 1)), - () -> 
assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0")) - ); + () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0"))); } @Test @@ -178,8 +174,7 @@ public void constructInteger() { assertAll( () -> assertEquals(integerValue(1), tupleValue("{\"intV\":1}").get("intV")), () -> assertEquals(integerValue(1), constructFromObject("intV", 1)), - () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0")) - ); + () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0"))); } @Test @@ -192,33 +187,29 @@ public void constructLong() { assertAll( () -> assertEquals(longValue(1L), tupleValue("{\"longV\":1}").get("longV")), () -> assertEquals(longValue(1L), constructFromObject("longV", 1L)), - () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0")) - ); + () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0"))); } @Test public void constructFloat() { assertAll( () -> assertEquals(floatValue(1f), tupleValue("{\"floatV\":1.0}").get("floatV")), - () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f)) - ); + () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f))); } @Test public void constructDouble() { assertAll( () -> assertEquals(doubleValue(1d), tupleValue("{\"doubleV\":1.0}").get("doubleV")), - () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d)) - ); + () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d))); } @Test public void constructString() { assertAll( - () -> assertEquals(stringValue("text"), - tupleValue("{\"stringV\":\"text\"}").get("stringV")), - () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text")) - ); + () -> + assertEquals(stringValue("text"), tupleValue("{\"stringV\":\"text\"}").get("stringV")), + () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text"))); } @Test @@ -228,23 +219,25 @@ public void constructBoolean() { () -> assertEquals(booleanValue(true), constructFromObject("boolV", true)), () -> assertEquals(booleanValue(true), constructFromObject("boolV", "true")), () -> assertEquals(booleanValue(true), constructFromObject("boolV", 1)), - () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0)) - ); + () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0))); } @Test public void constructText() { assertAll( - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textV\":\"text\"}").get("textV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textV", "text")), - - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textKeywordV", "text")) - ); + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textV\":\"text\"}").get("textV")), + () -> + assertEquals(new OpenSearchExprTextValue("text"), constructFromObject("textV", "text")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), constructFromObject("textKeywordV", "text"))); } @Test @@ -252,95 +245,122 @@ public void constructDates() { ExprValue dateStringV = constructFromObject("dateStringV", "1984-04-12"); assertAll( () -> assertEquals(new ExprDateValue("1984-04-12"), dateStringV), - () -> assertEquals(new ExprDateValue( - 
LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), - constructFromObject("dateV", 450576000000L)), - () -> assertEquals(new ExprDateValue("1984-04-12"), - constructFromObject("dateOrOrdinalDateV", "1984-103")), - () -> assertEquals(new ExprDateValue("2015-01-01"), - tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV")) - ); + () -> + assertEquals( + new ExprDateValue( + LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), + constructFromObject("dateV", 450576000000L)), + () -> + assertEquals( + new ExprDateValue("1984-04-12"), + constructFromObject("dateOrOrdinalDateV", "1984-103")), + () -> + assertEquals( + new ExprDateValue("2015-01-01"), + tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV"))); } @Test public void constructTimes() { - ExprValue timeStringV = constructFromObject("timeStringV","12:10:30.000Z"); + ExprValue timeStringV = constructFromObject("timeStringV", "12:10:30.000Z"); assertAll( () -> assertTrue(timeStringV.isDateTime()), () -> assertTrue(timeStringV instanceof ExprTimeValue), () -> assertEquals(new ExprTimeValue("12:10:30"), timeStringV), - () -> assertEquals(new ExprTimeValue(LocalTime.from( - Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), - constructFromObject("timeV", 1420070400001L)), - () -> assertEquals(new ExprTimeValue("09:07:42.000"), - constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), - () -> assertEquals(new ExprTimeValue("09:07:42"), - tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV")) - ); + () -> + assertEquals( + new ExprTimeValue( + LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), + constructFromObject("timeV", 1420070400001L)), + () -> + assertEquals( + new ExprTimeValue("09:07:42.000"), + constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), + () -> + assertEquals( + new ExprTimeValue("09:07:42"), + tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV"))); } @Test public void constructDatetime() { assertAll( - () -> assertEquals( - new ExprTimestampValue("2015-01-01 00:00:00"), - tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", "1420070400001")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), - constructFromObject("epochSecondV", 142704001L)), - () -> assertEquals( - new ExprTimeValue("10:20:30"), - tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), - () -> assertEquals( - new ExprDateValue("1961-04-12"), - tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), - () -> 
assertEquals( - new ExprTimestampValue("1984-05-10 20:30:40"), - tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - constructFromObject("timestampV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("dateOrEpochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 00:00:00"), + tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), + constructFromObject("epochSecondV", 142704001L)), + () -> + assertEquals( + new ExprTimeValue("10:20:30"), + tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), + () -> + assertEquals( + new ExprDateValue("1961-04-12"), + tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), + () -> + assertEquals( + new ExprTimestampValue("1984-05-10 20:30:40"), + tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + constructFromObject("timestampV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("dateOrEpochMillisV", "1420070400001")), // case: timestamp-formatted field, but it only gets a time: should match a time - () -> assertEquals( - new ExprTimeValue("19:36:22"), - tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimeValue("19:36:22"), + tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), // case: timestamp-formatted field, but it only gets a date: should match a date - () -> assertEquals( - new ExprDateValue("2011-03-03"), - tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV")) - ); + () -> + assertEquals( + new 
ExprDateValue("2011-03-03"), + tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV"))); } @Test @@ -350,11 +370,11 @@ public void constructDatetime_fromCustomFormat() { constructFromObject("customFormatV", "2015-01-01-12-10-30")); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("customFormatV", "2015-01-01 12-10-30")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, unsupported format.", exception.getMessage()); assertEquals( @@ -369,91 +389,87 @@ public void constructDatetime_fromCustomFormat() { @Test public void constructDatetimeFromUnsupportedFormat_ThrowIllegalArgumentException() { IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timestampV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, unsupported format.", exception.getMessage()); // fail with missing seconds exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateOrEpochMillisV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, unsupported format.", exception.getMessage()); } @Test public void constructTimeFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); assertEquals( - "Construct TIME from \"2015-01-01\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"2015-01-01\" failed, unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); assertEquals( - "Construct TIME from \"10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"10:10\" failed, unsupported format.", exception.getMessage()); } @Test public void constructDateFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); assertEquals( - "Construct DATE from \"12:10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct DATE from \"12:10:10\" failed, unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); - assertEquals( - "Construct DATE from \"abc\" failed, " - + "unsupported format.", - exception.getMessage()); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); + 
assertEquals("Construct DATE from \"abc\" failed, unsupported format.", exception.getMessage()); } @Test public void constructDateFromIncompleteFormat() { - assertEquals( - new ExprDateValue("1984-01-01"), - constructFromObject("incompleteFormatV", "1984")); + assertEquals(new ExprDateValue("1984-01-01"), constructFromObject("incompleteFormatV", "1984")); } @Test public void constructArray() { assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), tupleValue("{\"arrayV\":[{\"info\":\"zz\",\"author\":\"au\"}]}").get("arrayV")); assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), - constructFromObject("arrayV", List.of( - ImmutableMap.of("info", "zz", "author", "au")))); + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), + constructFromObject("arrayV", List.of(ImmutableMap.of("info", "zz", "author", "au")))); } @Test public void constructArrayOfStrings() { - assertEquals(new ExprCollectionValue( - List.of(stringValue("zz"), stringValue("au"))), + assertEquals( + new ExprCollectionValue(List.of(stringValue("zz"), stringValue("au"))), constructFromObject("arrayV", List.of("zz", "au"))); } @@ -461,100 +477,71 @@ public void constructArrayOfStrings() { public void constructNestedArraysOfStrings() { assertEquals( new ExprCollectionValue( - List.of( - collectionValue( - List.of("zz", "au") - ), - collectionValue( - List.of("ss") - ) - ) - ), - tupleValueWithArraySupport( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + List.of(collectionValue(List.of("zz", "au")), collectionValue(List.of("ss")))), + tupleValueWithArraySupport("{\"stringV\":[ [\"zz\", \"au\"], [\"ss\"] ]}").get("stringV")); } @Test public void constructNestedArraysOfStringsReturnsFirstIndex() { assertEquals( - stringValue("zz"), - tupleValue( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + stringValue("zz"), tupleValue("{\"stringV\":[[\"zz\", \"au\"],[\"ss\"]]}").get("stringV")); } @Test public void constructMultiNestedArraysOfStringsReturnsFirstIndex() { assertEquals( stringValue("z"), - tupleValue( - "{\"stringV\":" - + "[\"z\"," - + "[\"s\"]," - + "[\"zz\", \"au\"]" - + "]}" - ).get("stringV")); + tupleValue("{\"stringV\":" + "[\"z\",[\"s\"],[\"zz\", \"au\"]]}").get("stringV")); } @Test public void constructArrayOfInts() { - assertEquals(new ExprCollectionValue( - List.of(integerValue(1), integerValue(2))), + assertEquals( + new ExprCollectionValue(List.of(integerValue(1), integerValue(2))), constructFromObject("arrayV", List.of(1, 2))); } @Test public void constructArrayOfShorts() { // Shorts are treated same as integer - assertEquals(new ExprCollectionValue( - List.of(shortValue((short)3), shortValue((short)4))), + assertEquals( + new ExprCollectionValue(List.of(shortValue((short) 3), shortValue((short) 4))), constructFromObject("arrayV", List.of(3, 4))); } @Test public void constructArrayOfLongs() { - assertEquals(new ExprCollectionValue( - List.of(longValue(123456789L), longValue(987654321L))), + assertEquals( + new 
ExprCollectionValue(List.of(longValue(123456789L), longValue(987654321L))), constructFromObject("arrayV", List.of(123456789L, 987654321L))); } @Test public void constructArrayOfFloats() { - assertEquals(new ExprCollectionValue( - List.of(floatValue(3.14f), floatValue(4.13f))), + assertEquals( + new ExprCollectionValue(List.of(floatValue(3.14f), floatValue(4.13f))), constructFromObject("arrayV", List.of(3.14f, 4.13f))); } @Test public void constructArrayOfDoubles() { - assertEquals(new ExprCollectionValue( - List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), + assertEquals( + new ExprCollectionValue(List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), constructFromObject("arrayV", List.of(9.1928374756D, 4.987654321D))); } @Test public void constructArrayOfBooleans() { - assertEquals(new ExprCollectionValue( - List.of(booleanValue(true), booleanValue(false))), + assertEquals( + new ExprCollectionValue(List.of(booleanValue(true), booleanValue(false))), constructFromObject("arrayV", List.of(true, false))); } @Test public void constructNestedObjectArrayNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1), - Map.of("count", 2) - )), - tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1), Map.of("count", 2))), + tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}").get("nestedV")); } @Test @@ -562,84 +549,70 @@ public void constructNestedObjectArrayOfObjectArraysNode() { assertEquals( collectionValue( List.of( - Map.of("year", + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ), - Map.of("year", + Map.of("timeV", new ExprTimeValue("09:07:42")))), + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ) - ) - ), + Map.of("timeV", new ExprTimeValue("09:07:42")))))), tupleValueWithArraySupport( - "{\"deepNestedV\":" - + "[" - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}," - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}" - + "]" - + "}") + "{\"deepNestedV\":" + + " [" + + " {\"year\":" + + " [" + + " {\"timeV\":\"09:07:42\"}," + + " {\"timeV\":\"09:07:42\"}" + + " ]" + + " }," + + " {\"year\":" + + " [" + + " {\"timeV\":\"09:07:42\"}," + + " {\"timeV\":\"09:07:42\"}" + + " ]" + + " }" + + " ]" + + "}") .get("deepNestedV")); } @Test public void constructNestedArrayNode() { - assertEquals(collectionValue( - List.of( - 1969, - 2011 - )), - tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(1969, 2011)), + tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}").get("nestedV")); } @Test public void constructNestedObjectNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1969) - )), - tupleValue("{\"nestedV\":{\"count\":1969}}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1969))), + tupleValue("{\"nestedV\":{\"count\":1969}}").get("nestedV")); } @Test public void constructArrayOfGeoPoints() { - assertEquals(new ExprCollectionValue( + assertEquals( + new ExprCollectionValue( List.of( new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), - new OpenSearchExprGeoPointValue(-33.6123556, 66.287449)) - ), + new OpenSearchExprGeoPointValue(-33.6123556, 
66.287449))), tupleValueWithArraySupport( - "{\"geoV\":[" - + "{\"lat\":42.60355556,\"lon\":-97.25263889}," - + "{\"lat\":-33.6123556,\"lon\":66.287449}" - + "]}" - ).get("geoV") - ); + "{\"geoV\":[" + + "{\"lat\":42.60355556,\"lon\":-97.25263889}," + + "{\"lat\":-33.6123556,\"lon\":66.287449}" + + "]}") + .get("geoV")); } @Test public void constructArrayOfIPsReturnsFirstIndex() { assertEquals( new OpenSearchExprIpValue("192.168.0.1"), - tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}") - .get("ipV") - ); + tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}").get("ipV")); } @Test @@ -647,8 +620,7 @@ public void constructBinaryArrayReturnsFirstIndex() { assertEquals( new OpenSearchExprBinaryValue("U29tZSBiaWsdfsdfgYmxvYg=="), tupleValue("{\"binaryV\":[\"U29tZSBiaWsdfsdfgYmxvYg==\",\"U987yuhjjiy8jhk9vY+98jjdf\"]}") - .get("binaryV") - ); + .get("binaryV")); } @Test @@ -656,26 +628,21 @@ public void constructArrayOfCustomEpochMillisReturnsFirstIndex() { assertEquals( new ExprDatetimeValue("2015-01-01 12:10:30"), tupleValue("{\"customAndEpochMillisV\":[\"2015-01-01 12:10:30\",\"1999-11-09 01:09:44\"]}") - .get("customAndEpochMillisV") - ); + .get("customAndEpochMillisV")); } @Test public void constructArrayOfDateStringsReturnsFirstIndex() { assertEquals( new ExprDateValue("1984-04-12"), - tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}") - .get("dateStringV") - ); + tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}").get("dateStringV")); } @Test public void constructArrayOfTimeStringsReturnsFirstIndex() { assertEquals( new ExprTimeValue("12:10:30"), - tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}") - .get("timeStringV") - ); + tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}").get("timeStringV")); } @Test @@ -683,8 +650,7 @@ public void constructArrayOfEpochMillis() { assertEquals( new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), tupleValue("{\"dateOrEpochMillisV\":[\"1420070400001\",\"1454251113333\"]}") - .get("dateOrEpochMillisV") - ); + .get("dateOrEpochMillisV")); } @Test @@ -711,54 +677,64 @@ public void constructStruct() { @Test public void constructIP() { - assertEquals(new OpenSearchExprIpValue("192.168.0.1"), + assertEquals( + new OpenSearchExprIpValue("192.168.0.1"), tupleValue("{\"ipV\":\"192.168.0.1\"}").get("ipV")); } @Test public void constructGeoPoint() { - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":-97.25263889}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":\"42.60355556\",\"lon\":\"-97.25263889\"}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), constructFromObject("geoV", "42.60355556,-97.25263889")); } @Test public void constructGeoPointFromUnsupportedFormatShouldThrowException() { IllegalStateException exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":[42.60355556,-97.25263889]}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": 
number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lon\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":true,\"lon\":-97.25263889}}").get("geoV")); assertEquals("latitude must be number value, but got value: true", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":false}}").get("geoV")); assertEquals("longitude must be number value, but got value: false", exception.getMessage()); } @Test public void constructBinary() { - assertEquals(new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), + assertEquals( + new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), tupleValue("{\"binaryV\":\"U29tZSBiaW5hcnkgYmxvYg==\"}").get("binaryV")); } @@ -769,14 +745,16 @@ public void constructBinary() { @Test public void constructFromOpenSearchArrayReturnFirstElement() { assertEquals(integerValue(1), tupleValue("{\"intV\":[1, 2, 3]}").get("intV")); - assertEquals(new ExprTupleValue( - new LinkedHashMap() { - { - put("id", integerValue(1)); - put("state", stringValue("WA")); - } - }), tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") - .get("structV")); + assertEquals( + new ExprTupleValue( + new LinkedHashMap() { + { + put("id", integerValue(1)); + put("state", stringValue("WA")); + } + }), + tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") + .get("structV")); } @Test @@ -799,19 +777,13 @@ public void constructUnsupportedTypeThrowException() { new OpenSearchExprValueFactory(Map.of("type", new TestType())); IllegalStateException exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("{\"type\":1}", false) - ); + IllegalStateException.class, () -> exprValueFactory.construct("{\"type\":1}", false)); assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("type", 1, false) - ); - assertEquals( - "Unsupported type: TEST_TYPE for value: 1.", - exception.getMessage()); + IllegalStateException.class, () -> exprValueFactory.construct("type", 1, false)); + assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); } @Test @@ -820,21 +792,21 @@ public void constructUnsupportedTypeThrowException() { public void factoryMappingsAreExtendableWithoutOverWrite() throws NoSuchFieldException, IllegalAccessException { var factory = new OpenSearchExprValueFactory(Map.of("value", OpenSearchDataType.of(INTEGER))); - factory.extendTypeMapping(Map.of( - "value", OpenSearchDataType.of(DOUBLE), - "agg", 
OpenSearchDataType.of(DATE))); + factory.extendTypeMapping( + Map.of( + "value", OpenSearchDataType.of(DOUBLE), + "agg", OpenSearchDataType.of(DATE))); // extract private field for testing purposes var field = factory.getClass().getDeclaredField("typeMapping"); field.setAccessible(true); @SuppressWarnings("unchecked") - var mapping = (Map)field.get(factory); + var mapping = (Map) field.get(factory); assertAll( () -> assertEquals(2, mapping.size()), () -> assertTrue(mapping.containsKey("value")), () -> assertTrue(mapping.containsKey("agg")), () -> assertEquals(OpenSearchDataType.of(INTEGER), mapping.get("value")), - () -> assertEquals(OpenSearchDataType.of(DATE), mapping.get("agg")) - ); + () -> assertEquals(OpenSearchDataType.of(DATE), mapping.get("agg"))); } public Map tupleValue(String jsonString) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java index 6d2b9b13ce..047a510180 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java @@ -32,17 +32,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchQueryManagerTest { - @Mock - private QueryId queryId; + @Mock private QueryId queryId; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private UnresolvedPlan plan; + @Mock private UnresolvedPlan plan; - @Mock - private ResponseListener listener; + @Mock private ResponseListener listener; @Test public void submitQuery() { @@ -51,19 +47,20 @@ public void submitQuery() { when(nodeClient.threadPool()).thenReturn(threadPool); AtomicBoolean isRun = new AtomicBoolean(false); - AbstractPlan queryPlan = new QueryPlan(queryId, plan, queryService, listener) { - @Override - public void execute() { - isRun.set(true); - } - }; + AbstractPlan queryPlan = + new QueryPlan(queryId, plan, queryService, listener) { + @Override + public void execute() { + isRun.set(true); + } + }; doAnswer( - invocation -> { - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(threadPool) .schedule(any(), any(), any()); new OpenSearchQueryManager(nodeClient).submit(queryPlan); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java index 96e85a8173..26bcdf6d89 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -27,17 +26,13 @@ @ExtendWith(MockitoExtension.class) class ResourceMonitorPlanTest { - @Mock - private PhysicalPlan plan; + @Mock private PhysicalPlan plan; - @Mock - private ResourceMonitor resourceMonitor; + @Mock private ResourceMonitor resourceMonitor; - @Mock - private PhysicalPlanNodeVisitor visitor; + @Mock private PhysicalPlanNodeVisitor visitor; - @Mock - private Object context; + @Mock private Object context; private ResourceMonitorPlan monitorPlan; diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java index af4cdc8ce6..a61f7343e6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -20,11 +19,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchMemoryHealthyTest { - @Mock - private OpenSearchMemoryHealthy.RandomFail randomFail; + @Mock private OpenSearchMemoryHealthy.RandomFail randomFail; - @Mock - private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; + @Mock private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; private OpenSearchMemoryHealthy monitor; @@ -45,7 +42,8 @@ void memoryUsageExceedLimitFastFailure() { when(memoryUsage.usage()).thenReturn(10L); when(randomFail.shouldFail()).thenReturn(true); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -54,7 +52,8 @@ void memoryUsageExceedLimitWithoutFastFailure() { when(memoryUsage.usage()).thenReturn(10L); when(randomFail.shouldFail()).thenReturn(false); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -72,8 +71,7 @@ void randomFail() { @Test void setMemoryUsage() { - OpenSearchMemoryHealthy.MemoryUsage usage = - new OpenSearchMemoryHealthy.MemoryUsage(); + OpenSearchMemoryHealthy.MemoryUsage usage = new OpenSearchMemoryHealthy.MemoryUsage(); assertTrue(usage.usage() > 0); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java index cd27b0710e..f56d8cb81b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -24,11 +23,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResourceMonitorTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private OpenSearchMemoryHealthy memoryMonitor; + @Mock private OpenSearchMemoryHealthy memoryMonitor; @BeforeEach public void setup() { @@ -47,8 +44,8 @@ void isHealthy() { @Test void notHealthyFastFailure() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -58,8 +55,8 @@ void notHealthyFastFailure() { @Test void notHealthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - 
OpenSearchMemoryHealthy.MemoryUsageExceedException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -70,8 +67,9 @@ void notHealthyWithRetry() { @Test void healthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedException.class).thenReturn(true); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) + .thenReturn(true); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java index b6966f2403..d2bc5b0641 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,32 +40,23 @@ @ExtendWith(MockitoExtension.class) public class OpenSearchQueryRequestTest { - @Mock - private Function searchAction; + @Mock private Function searchAction; - @Mock - private Function scrollAction; + @Mock private Function scrollAction; - @Mock - private Consumer cleanAction; + @Mock private Consumer cleanAction; - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private FetchSourceContext fetchSourceContext; + @Mock private FetchSourceContext fetchSourceContext; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final OpenSearchQueryRequest request = new OpenSearchQueryRequest("test", 200, factory, List.of()); @@ -76,12 +66,9 @@ public class OpenSearchQueryRequestTest { @Test void search() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -96,12 +83,9 @@ void search() { @Test void search_withoutContext() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -113,12 +97,9 @@ void search_withoutContext() { @Test void search_withIncludes() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - 
factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); String[] includes = {"_id", "_index"}; when(searchAction.apply(any())).thenReturn(searchResponse); @@ -144,13 +125,15 @@ void clean() { void searchRequest() { request.getSourceBuilder().query(QueryBuilders.termQuery("name", "John")); - assertSearchRequest(new SearchRequest() - .indices("test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + assertSearchRequest( + new SearchRequest() + .indices("test") + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), request); } @@ -161,28 +144,31 @@ void searchCrossClusterRequest() { assertSearchRequest( new SearchRequest() .indices("ccs:test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), remoteRequest); } @Test void writeTo_unsupported() { - assertThrows(UnsupportedOperationException.class, - () -> request.writeTo(mock(StreamOutput.class))); + assertThrows( + UnsupportedOperationException.class, () -> request.writeTo(mock(StreamOutput.class))); } private void assertSearchRequest(SearchRequest expected, OpenSearchQueryRequest request) { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], - new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest); + return when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; request.search(querySearch, searchScrollRequest -> null); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java index 483ea1290e..5bb0a2207b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.Assert.assertThrows; @@ -71,11 +70,10 @@ class OpenSearchRequestBuilderTest { private static final Integer DEFAULT_LIMIT = 200; private static final Integer MAX_RESULT_WINDOW = 500; - private static final OpenSearchRequest.IndexName indexName - = new OpenSearchRequest.IndexName("test"); + private static final OpenSearchRequest.IndexName indexName = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; private OpenSearchRequestBuilder requestBuilder; @@ -99,7 +97,8 @@ void build_query_request() { .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) .trackScores(true), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, 
MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -111,12 +110,14 @@ void build_scroll_request_with_correct_size() { assertEquals( new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), new SearchSourceBuilder() .from(offset) .size(MAX_RESULT_WINDOW - offset) .timeout(DEFAULT_QUERY_TIMEOUT), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -126,33 +127,32 @@ void test_push_down_query() { requestBuilder.pushDownFilter(query); var r = requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT); - Function querySearch = searchRequest -> { - assertEquals( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT) - .query(query) - .sort(DOC_FIELD_NAME, ASC), - searchRequest.source() - ); - return mock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; + Function querySearch = + searchRequest -> { + assertEquals( + new SearchSourceBuilder() + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT) + .query(query) + .sort(DOC_FIELD_NAME, ASC), + searchRequest.source()); + return mock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; r.search(querySearch, scrollSearch); - } @Test void test_push_down_aggregation() { - AggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList(new TermsValuesSourceBuilder("longA"))); + AggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", Collections.singletonList(new TermsValuesSourceBuilder("longA"))); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser("AVG(intA)")); + new CompositeAggregationParser(new SingleValueParser("AVG(intA)")); requestBuilder.pushDownAggregation(Pair.of(List.of(aggBuilder), responseParser)); assertEquals( @@ -161,8 +161,7 @@ void test_push_down_aggregation() { .size(0) .timeout(DEFAULT_QUERY_TIMEOUT) .aggregation(aggBuilder), - requestBuilder.getSourceBuilder() - ); + requestBuilder.getSourceBuilder()); verify(exprValueFactory).setParser(responseParser); } @@ -184,21 +183,25 @@ void test_push_down_query_and_sort() { requestBuilder); } - void assertSearchSourceBuilder(SearchSourceBuilder expected, - OpenSearchRequestBuilder requestBuilder) + void assertSearchSourceBuilder( + SearchSourceBuilder expected, OpenSearchRequestBuilder requestBuilder) throws UnsupportedOperationException { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest.source()); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], new TotalHits(0, - TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; - requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT).search( - querySearch, scrollSearch); + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest.source()); + return when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; + Function scrollSearch = + searchScrollRequest -> 
{ + throw new UnsupportedOperationException(); + }; + requestBuilder + .build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT) + .search(querySearch, scrollSearch); } @Test @@ -231,9 +234,8 @@ void test_push_down_non_field_sort() { @Test void test_push_down_multiple_sort() { - requestBuilder.pushDownSort(List.of( - SortBuilders.fieldSort("intA"), - SortBuilders.fieldSort("intB"))); + requestBuilder.pushDownSort( + List.of(SortBuilders.fieldSort("intA"), SortBuilders.fieldSort("intB"))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -255,7 +257,7 @@ void test_push_down_project() { .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -285,7 +287,7 @@ void test_push_down_project_limit() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -315,7 +317,7 @@ void test_push_down_project_limit_and_offset() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -333,24 +335,25 @@ void test_push_down_project_limit_and_offset() { @Test void test_push_down_nested() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -363,28 +366,29 @@ void test_push_down_nested() { @Test void test_push_down_multiple_nested_with_same_path() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ), - Map.of( - "field", new ReferenceExpression("message.from", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING)), + Map.of( + "field", new ReferenceExpression("message.from", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null), - new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null) - ); + new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), 
null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info", "message.from"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext( + true, new String[] {"message.info", "message.from"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(nestedQuery))) @@ -396,35 +400,35 @@ void test_push_down_multiple_nested_with_same_path() { @Test void test_push_down_nested_with_filter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.getSourceBuilder().query(QueryBuilders.rangeQuery("myNum").gt(3)); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("myNum").gt(3)) - .must(nestedQuery) - ) - ) + QueryBuilders.boolQuery() + .filter( + QueryBuilders.boolQuery() + .must(QueryBuilders.rangeQuery("myNum").gt(3)) + .must(nestedQuery))) .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT), @@ -433,17 +437,15 @@ void test_push_down_nested_with_filter() { @Test void testPushDownNestedWithNestedFilter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); QueryBuilder innerFilterQuery = QueryBuilders.rangeQuery("myNum").gt(3); QueryBuilder filterQuery = @@ -452,20 +454,20 @@ void testPushDownNestedWithNestedFilter() { requestBuilder.getSourceBuilder().query(filterQuery); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new 
String[]{"message.info"}, null))); - - assertSearchSourceBuilder(new SearchSourceBuilder() - .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(filterQuery) - ) - ) - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); + + assertSearchSourceBuilder( + new SearchSourceBuilder() + .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(filterQuery))) + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT), + requestBuilder); } @Test @@ -479,8 +481,9 @@ void test_push_type_mapping() { @Test void push_down_highlight_with_repeating_fields() { requestBuilder.pushDownHighlight("name", Map.of()); - var exception = assertThrows(SemanticCheckException.class, () -> - requestBuilder.pushDownHighlight("name", Map.of())); + var exception = + assertThrows( + SemanticCheckException.class, () -> requestBuilder.pushDownHighlight("name", Map.of())); assertEquals("Duplicate field name in highlight", exception.getMessage()); } @@ -488,10 +491,7 @@ void push_down_highlight_with_repeating_fields() { void push_down_page_size() { requestBuilder.pushDownPageSize(3); assertSearchSourceBuilder( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(3) - .timeout(DEFAULT_QUERY_TIMEOUT), + new SearchSourceBuilder().from(DEFAULT_OFFSET).size(3).timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); } @@ -499,7 +499,8 @@ void push_down_page_size() { void exception_when_non_zero_offset_and_page_size() { requestBuilder.pushDownPageSize(3); requestBuilder.pushDownLimit(300, 2); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java index 4b9233dbc1..66cb6bf14c 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertAll; @@ -49,50 +48,48 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchScrollRequestTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1); - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final SearchSourceBuilder searchSourceBuilder = new 
SearchSourceBuilder(); - private final OpenSearchScrollRequest request = new OpenSearchScrollRequest( - INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of()); + private final OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of()); @Test void constructor() { - var request = new OpenSearchScrollRequest(INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of("test")); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of("test")); assertEquals(List.of("test"), request.getIncludes()); } @Test void searchRequest() { searchSourceBuilder.query(QueryBuilders.termQuery("name", "John")); - request.search(searchRequest -> { - assertEquals( - new SearchRequest() - .indices("test") - .scroll(TimeValue.timeValueMinutes(1)) - .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), - searchRequest); - SearchHits searchHitsMock = when(mock(SearchHits.class).getHits()) - .thenReturn(new SearchHit[0]).getMock(); - return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); - }, searchScrollRequest -> null); + request.search( + searchRequest -> { + assertEquals( + new SearchRequest() + .indices("test") + .scroll(TimeValue.timeValueMinutes(1)) + .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), + searchRequest); + SearchHits searchHitsMock = + when(mock(SearchHits.class).getHits()).thenReturn(new SearchHit[0]).getMock(); + return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); + }, + searchScrollRequest -> null); } @Test @@ -110,21 +107,19 @@ void isScrollStarted() { void scrollRequest() { request.setScrollId("scroll123"); assertEquals( - new SearchScrollRequest() - .scroll(TimeValue.timeValueMinutes(1)) - .scrollId("scroll123"), + new SearchScrollRequest().scroll(TimeValue.timeValueMinutes(1)).scrollId("scroll123"), request.scrollRequest()); } @Test void search() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -135,13 +130,13 @@ void search() { @Test void search_without_context() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -154,13 +149,13 @@ void search_without_context() { @SneakyThrows void search_without_scroll_and_initial_request_should_throw() { // Steps: serialize a not used request, deserialize it, then use - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + 
new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); var outStream = new BytesStreamOutput(); request.writeTo(outStream); outStream.flush(); @@ -172,20 +167,21 @@ void search_without_scroll_and_initial_request_should_throw() { assertAll( () -> assertFalse(request2.isScroll()), () -> assertNull(request2.getInitialSearchRequest()), - () -> assertThrows(UnsupportedOperationException.class, - () -> request2.search(sr -> fail("search"), sr -> fail("scroll"))) - ); + () -> + assertThrows( + UnsupportedOperationException.class, + () -> request2.search(sr -> fail("search"), sr -> fail("scroll")))); } @Test void search_withoutIncludes() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -213,9 +209,10 @@ void clean_on_empty_response() { // This could happen on sequential search calls SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll1", "scroll2"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals("scroll1", request.getScrollId()); @@ -233,8 +230,9 @@ void clean_on_empty_response() { void no_clean_on_non_empty_response() { SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); request.search((sr) -> searchResponse, (sr) -> searchResponse); assertEquals("scroll", request.getScrollId()); @@ -246,8 +244,7 @@ void no_clean_on_non_empty_response() { @Test void no_cursor_on_empty_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], null, 1f)); + when(searchResponse.getHits()).thenReturn(new SearchHits(new SearchHit[0], null, 1f)); request.search((x) -> searchResponse, (x) -> searchResponse); assertFalse(request.hasAnotherBatch()); @@ -256,8 +253,9 @@ void no_cursor_on_empty_response() { @Test void no_clean_if_no_scroll_in_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals(NO_SCROLL_ID, request.getScrollId()); @@ -286,8 +284,10 @@ void 
serialize_deserialize_no_needClean() { @Test @SneakyThrows void serialize_deserialize_needClean() { - lenient().when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + lenient() + .when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); lenient().when(searchResponse.getScrollId()).thenReturn(""); var stream = new BytesStreamOutput(); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index 7ed6c900dd..cd915cf5e5 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ -36,12 +36,7 @@ class OpenSearchAggregationResponseParserTest { /** SELECT MAX(age) as max FROM accounts. */ @Test void no_bucket_one_metric_should_pass() { - String response = - "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; + String response = "{\n \"max#max\": {\n \"value\": 40\n }\n}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); assertThat(parse(parser, response), contains(entry("max", 40d))); } @@ -145,12 +140,7 @@ void two_bucket_one_metric_should_pass() { @Test void unsupported_aggregation_should_fail() { - String response = - "{\n" - + " \"date_histogram#date_histogram\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; + String response = "{\n \"date_histogram#date_histogram\": {\n \"value\": 40\n }\n}"; NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); RuntimeException exception = assertThrows(RuntimeException.class, () -> parse(parser, response)); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java index b26847b095..6f4605bc2f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static java.util.Collections.emptyList; @@ -49,31 +48,25 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResponseTest { - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit1; + @Mock private SearchHit searchHit1; - @Mock - private SearchHit searchHit2; + @Mock private SearchHit searchHit2; - @Mock - private Aggregations aggregations; + @Mock private Aggregations aggregations; private List includes = List.of(); - @Mock - private OpenSearchAggregationResponseParser parser; + @Mock private OpenSearchAggregationResponseParser parser; - private ExprTupleValue exprTupleValue1 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", - new ExprIntegerValue(1))); + private ExprTupleValue exprTupleValue1 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); - private ExprTupleValue 
exprTupleValue2 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", - new ExprIntegerValue(2))); + private ExprTupleValue exprTupleValue2 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", new ExprIntegerValue(2))); @Test void isEmpty() { @@ -119,7 +112,8 @@ void iterator() { when(searchHit1.getInnerHits()).thenReturn(null); when(searchHit2.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())) - .thenReturn(exprTupleValue1).thenReturn(exprTupleValue2); + .thenReturn(exprTupleValue1) + .thenReturn(exprTupleValue2); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, List.of("id1"))) { @@ -137,9 +131,8 @@ void iterator() { @Test void iterator_metafields() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -160,15 +153,16 @@ void iterator_metafields() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_routing", new ExprStringValue(shardTarget.toString()), - "_sort", new ExprLongValue(123456L), - "_score", new ExprFloatValue(3.75F), - "_maxscore", new ExprFloatValue(3.75F) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_routing", new ExprStringValue(shardTarget.toString()), + "_sort", new ExprLongValue(123456L), + "_score", new ExprFloatValue(3.75F), + "_maxscore", new ExprFloatValue(3.75F))); List includes = List.of("id1", "_index", "_id", "_routing", "_sort", "_score", "_maxscore"); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { @@ -184,9 +178,8 @@ void iterator_metafields() { @Test void iterator_metafields_withoutIncludes() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -200,9 +193,8 @@ void iterator_metafields_withoutIncludes() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -217,9 +209,8 @@ void iterator_metafields_withoutIncludes() { @Test void iterator_metafields_scoreNaN() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -237,12 +228,13 @@ void iterator_metafields_scoreNaN() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1", 
"_index", "_id", "_sort", "_score", "_maxscore"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_sort", new ExprLongValue(123456L) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_sort", new ExprLongValue(123456L))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -262,13 +254,14 @@ void iterator_with_inner_hits() { new SearchHit[] {searchHit1}, new TotalHits(2L, TotalHits.Relation.EQUAL_TO), 1.0F)); - when(searchHit1.getInnerHits()).thenReturn( - Map.of( - "innerHit", - new SearchHits( - new SearchHit[] {searchHit1}, - new TotalHits(2L, TotalHits.Relation.EQUAL_TO), - 1.0F))); + when(searchHit1.getInnerHits()) + .thenReturn( + Map.of( + "innerHit", + new SearchHits( + new SearchHit[] {searchHit1}, + new TotalHits(2L, TotalHits.Relation.EQUAL_TO), + 1.0F))); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue1); @@ -321,18 +314,17 @@ void aggregation_iterator() { @Test void highlight_iterator() { SearchHit searchHit = new SearchHit(1); - searchHit.sourceRef( - new BytesArray("{\"name\":\"John\"}")); - Map highlightMap = Map.of("highlights", - new HighlightField("Title", new Text[] {new Text("field")})); - searchHit.highlightFields(Map.of("highlights", new HighlightField("Title", - new Text[] {new Text("field")}))); + searchHit.sourceRef(new BytesArray("{\"name\":\"John\"}")); + Map highlightMap = + Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")})); + searchHit.highlightFields( + Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")}))); ExprValue resultTuple = ExprValueUtils.tupleValue(searchHit.getSourceAsMap()); when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[]{searchHit1}, + new SearchHit[] {searchHit1}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); @@ -340,11 +332,12 @@ void highlight_iterator() { when(factory.construct(any(), anyBoolean())).thenReturn(resultTuple); for (ExprValue resultHit : new OpenSearchResponse(searchResponse, factory, includes)) { - var expected = ExprValueUtils.collectionValue( - Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) - .map(t -> (t.toString())).collect(Collectors.toList())); - var result = resultHit.tupleValue().get( - "_highlight").tupleValue().get("highlights"); + var expected = + ExprValueUtils.collectionValue( + Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) + .map(t -> (t.toString())) + .collect(Collectors.toList())); + var result = resultHit.tupleValue().get("_highlight").tupleValue().get("highlights"); assertTrue(expected.equals(result)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java index 835798f162..ff2c311753 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static 
org.junit.jupiter.api.Assertions.assertEquals; @@ -33,14 +32,12 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSettingsTest { - @Mock - private ClusterSettings clusterSettings; + @Mock private ClusterSettings clusterSettings; @Test void getSettingValue() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -50,11 +47,14 @@ void getSettingValue() { @Test void getSettingValueWithPresetValuesInYml() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings - .get((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) + when(clusterSettings.get( + (Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) .thenReturn(new ByteSizeValue(20)); - when(clusterSettings.get(not(or(eq(ClusterName.CLUSTER_NAME_SETTING), - eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) + when(clusterSettings.get( + not( + or( + eq(ClusterName.CLUSTER_NAME_SETTING), + eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) .thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -78,8 +78,7 @@ void pluginNonDynamicSettings() { @Test void getSettings() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertFalse(settings.getSettings().isEmpty()); } @@ -87,12 +86,10 @@ void getSettings() { @Test void update() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue oldValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); - OpenSearchSettings.Updater updater = - settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); + OpenSearchSettings.Updater updater = settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); updater.accept(new ByteSizeValue(0L)); ByteSizeValue newValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -103,8 +100,7 @@ void update() { @Test void settingsFallback() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertEquals( settings.getSettingValue(Settings.Key.SQL_ENABLED), @@ -156,17 +152,17 @@ public void updateLegacySettingsFallback() { assertEquals(OpenSearchSettings.SQL_ENABLED_SETTING.get(settings), false); assertEquals(OpenSearchSettings.SQL_SLOWLOG_SETTING.get(settings), 10); - 
assertEquals(OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), - timeValueMinutes(1)); + assertEquals( + OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), timeValueMinutes(1)); assertEquals(OpenSearchSettings.PPL_ENABLED_SETTING.get(settings), true); - assertEquals(OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), + assertEquals( + OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), new ByteSizeValue((int) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.2))); assertEquals(OpenSearchSettings.QUERY_SIZE_LIMIT_SETTING.get(settings), 100); assertEquals(OpenSearchSettings.METRICS_ROLLING_WINDOW_SETTING.get(settings), 2000L); assertEquals(OpenSearchSettings.METRICS_ROLLING_INTERVAL_SETTING.get(settings), 100L); } - @Test void legacySettingsShouldBeDeprecatedBeforeRemove() { assertEquals(15, legacySettings().size()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java index 39af59b6cd..3ddb07d86a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java @@ -63,20 +63,16 @@ class OpenSearchIndexTest { public static final int QUERY_SIZE_LIMIT = 200; public static final TimeValue SCROLL_TIMEOUT = new TimeValue(1); - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private IndexMapping mapping; + @Mock private IndexMapping mapping; private OpenSearchIndex index; @@ -94,16 +90,18 @@ void isExist() { @Test void createIndex() { - Map mappings = Map.of( - "properties", + Map mappings = Map.of( - "name", "text", - "age", "integer")); + "properties", + Map.of( + "name", "text", + "age", "integer")); doNothing().when(client).createIndex("test", mappings); Map schema = new HashMap<>(); - schema.put("name", OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(MappingType.Keyword)))); + schema.put( + "name", + OpenSearchTextType.of(Map.of("keyword", OpenSearchDataType.of(MappingType.Keyword)))); schema.put("age", INTEGER); index.create(schema); verify(client).createIndex(any(), any()); @@ -111,24 +109,27 @@ void createIndex() { @Test void getFieldTypes() { - when(mapping.getFieldMappings()).thenReturn( - ImmutableMap.builder() - .put("name", MappingType.Keyword) - .put("address", MappingType.Text) - .put("age", MappingType.Integer) - .put("account_number", MappingType.Long) - .put("balance1", MappingType.Float) - .put("balance2", MappingType.Double) - .put("gender", MappingType.Boolean) - .put("family", MappingType.Nested) - .put("employer", MappingType.Object) - .put("birthday", MappingType.Date) - .put("id1", MappingType.Byte) - .put("id2", MappingType.Short) - .put("blob", MappingType.Binary) - .build().entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue()) - ))); + when(mapping.getFieldMappings()) + .thenReturn( + ImmutableMap.builder() + .put("name", MappingType.Keyword) + 
.put("address", MappingType.Text) + .put("age", MappingType.Integer) + .put("account_number", MappingType.Long) + .put("balance1", MappingType.Float) + .put("balance2", MappingType.Double) + .put("gender", MappingType.Boolean) + .put("family", MappingType.Nested) + .put("employer", MappingType.Object) + .put("birthday", MappingType.Date) + .put("id1", MappingType.Byte) + .put("id2", MappingType.Short) + .put("blob", MappingType.Binary) + .build() + .entrySet() + .stream() + .collect( + Collectors.toMap(Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); // Run more than once to confirm caching logic is covered and can work @@ -150,35 +151,30 @@ void getFieldTypes() { hasEntry("birthday", ExprCoreType.TIMESTAMP), hasEntry("id1", ExprCoreType.BYTE), hasEntry("id2", ExprCoreType.SHORT), - hasEntry("blob", (ExprType) OpenSearchDataType.of(MappingType.Binary)) - )); + hasEntry("blob", (ExprType) OpenSearchDataType.of(MappingType.Binary)))); } } @Test void checkCacheUsedForFieldMappings() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); - when(client.getIndexMappings("test")).thenReturn( - ImmutableMap.of("test", mapping)); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); + when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); OpenSearchIndex index = new OpenSearchIndex(client, settings, "test"); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); - lenient().when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Integer))); + lenient() + .when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Integer))); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); } @Test @@ -193,8 +189,7 @@ void getReservedFieldTypes() { hasEntry("_routing", ExprCoreType.STRING), hasEntry("_sort", ExprCoreType.LONG), hasEntry("_score", ExprCoreType.FLOAT), - hasEntry("_maxscore", ExprCoreType.FLOAT) - )); + hasEntry("_maxscore", ExprCoreType.FLOAT))); } @Test @@ -204,8 +199,9 @@ void implementRelationOperatorOnly() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, - 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(index.optimize(plan))); } @@ -216,8 +212,10 @@ void 
implementRelationOperatorWithOptimization() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, 200, - requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(plan)); + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + index.implement(plan)); } @Test @@ -239,12 +237,7 @@ void implementOtherLogicalOperators() { LogicalPlanDSL.dedupe( sort( eval( - remove( - rename( - index.createScanBuilder(), - mappings), - exclude), - newEvalField), + remove(rename(index.createScanBuilder(), mappings), exclude), newEvalField), sortField), dedupeField), include); @@ -258,9 +251,11 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.eval( PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( - new OpenSearchIndexScan(client, - QUERY_SIZE_LIMIT, requestBuilder.build(INDEX_NAME, maxResultWindow, - SCROLL_TIMEOUT)), + new OpenSearchIndexScan( + client, + QUERY_SIZE_LIMIT, + requestBuilder.build( + INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), mappings), exclude), newEvalField), diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java index 1089e7e252..38f2ae495e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.junit.jupiter.api.Assertions.assertAll; @@ -25,31 +24,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchStorageEngineTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Settings settings; + @Mock private Settings settings; @Test public void getTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - "test"); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), "test"); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchIndex)); } @Test public void getSystemTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - TABLE_INFO); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchSystemIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), TABLE_INFO); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchSystemIndex)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java index 5a510fefec..229d62abdf 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java @@ -26,10 +26,8 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanAggregationBuilderTest { - @Mock - OpenSearchRequestBuilder requestBuilder; - @Mock - LogicalAggregation logicalAggregation; + @Mock OpenSearchRequestBuilder requestBuilder; + @Mock LogicalAggregation logicalAggregation; OpenSearchIndexScanAggregationBuilder builder; @BeforeEach @@ -71,5 +69,4 @@ void pushDownPageSize() { void pushDownNested() { assertFalse(builder.pushDownNested(mock(LogicalNested.class))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java index e045bae3e3..6749f87c5b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -91,16 +90,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanOptimizationTest { - @Mock - private Table table; + @Mock private Table table; - @Mock - private OpenSearchIndexScan indexScan; + @Mock private OpenSearchIndexScan indexScan; private OpenSearchIndexScanBuilder indexScanBuilder; - @Mock - private OpenSearchRequestBuilder requestBuilder; + @Mock private OpenSearchRequestBuilder requestBuilder; private Runnable[] verifyPushDownCalls = {}; @@ -114,72 +110,54 @@ void setUp() { void test_project_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withProjectPushedDown(DSL.ref("intV", INTEGER))), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), - project( - relation("schema", table), - DSL.named("i", DSL.ref("intV", INTEGER))) - ); + indexScanBuilder(withProjectPushedDown(DSL.ref("intV", INTEGER))), + DSL.named("i", DSL.ref("intV", INTEGER))), + project(relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE intV = 1. - */ + /** SELECT intV as i FROM schema WHERE intV = 1. */ @Test void test_filter_push_down() { assertEqualsAfterOptimization( project( indexScanBuilder( - //withProjectPushedDown(DSL.ref("intV", INTEGER)), - withFilterPushedDown(QueryBuilders.termQuery("intV", 1)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + // withProjectPushedDown(DSL.ref("intV", INTEGER)), + withFilterPushedDown(QueryBuilders.termQuery("intV", 1))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). - */ + /** SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). 
*/ @Test void test_filter_on_opensearchfunction_with_trackedscores_push_down() { LogicalPlan expectedPlan = project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) queryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -197,35 +175,36 @@ void test_filter_on_multiple_opensearchfunctions_with_trackedscores_push_down() .should( QueryBuilders.queryStringQuery("QUERY") .field("intV", 1.5F) - .boost(12.5F) - ) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression firstQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + .boost(12.5F))), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression firstQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) firstQueryString).setScoreTracked(false); - FunctionExpression secondQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + FunctionExpression secondQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) secondQueryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - DSL.or(firstQueryString, secondQueryString) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), DSL.or(firstQueryString, secondQueryString)), + DSL.named("i", DSL.ref("intV", INTEGER))); 
assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -235,34 +214,28 @@ void test_filter_on_opensearchfunction_without_trackedscores_push_down() { project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(false) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); - - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(false)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); + + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } - /** - * SELECT avg(intV) FROM schema GROUP BY string_value. - */ + /** SELECT avg(intV) FROM schema GROUP BY string_value. */ @Test void test_aggregation_push_down() { assertEqualsAfterOptimization( @@ -272,20 +245,17 @@ void test_aggregation_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /* @@ -319,125 +289,79 @@ void aggregation_cant_merge_indexScan_with_project() { } */ - /** - * Sort - Relation --> IndexScan. - */ + /** Sort - Relation --> IndexScan. 
*/ @Test void test_sort_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withSortPushedDown( - SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first")) - ), - sort( - relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)) - ) - ); + SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)))); } @Test void test_page_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withPageSizePushDown(5)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), - paginate(project( - relation("schema", table), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), 5 - )); + indexScanBuilder(withPageSizePushDown(5)), DSL.named("intV", DSL.ref("intV", INTEGER))), + paginate( + project(relation("schema", table), DSL.named("intV", DSL.ref("intV", INTEGER))), 5)); } @Test void test_score_sort_push_down() { assertEqualsAfterOptimization( - indexScanBuilder( - withSortPushedDown( - SortBuilders.scoreSort().order(SortOrder.ASC) - ) - ), + indexScanBuilder(withSortPushedDown(SortBuilders.scoreSort().order(SortOrder.ASC))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)))); } @Test void test_limit_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + indexScanBuilder(withLimitPushedDown(1, 1)), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - limit( - relation("schema", table), - 1, 1), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + limit(relation("schema", table), 1, 1), DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test void test_highlight_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withHighlightPushedDown("*", Collections.emptyMap())), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ), + indexScanBuilder(withHighlightPushedDown("*", Collections.emptyMap())), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*")))), project( - highlight( - relation("schema", table), - DSL.literal("*"), Collections.emptyMap()), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ) - ); + highlight(relation("schema", table), DSL.literal("*"), Collections.emptyMap()), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*"))))); } @Test void test_nested_push_down() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); assertEqualsAfterOptimization( project( - nested( - indexScanBuilder( - withNestedPushedDown(nested.getFields())), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ), - project( - nested( - relation("schema", table), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ) - ); + 
nested(indexScanBuilder(withNestedPushedDown(nested.getFields())), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING)))), + project( + nested(relation("schema", table), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING))))); } - /** - * SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. - */ + /** SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. */ @Test void test_aggregation_filter_push_down() { assertEqualsAfterOptimization( @@ -448,50 +372,37 @@ void test_aggregation_filter_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } - /** - * Sort - Filter - Relation --> IndexScan. - */ + /** Sort - Filter - Relation --> IndexScan. */ @Test void test_sort_filter_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withFilterPushedDown(QueryBuilders.termQuery("intV", 1)), withSortPushedDown( - SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")) - ), + SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first"))), sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)))); } - /** - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. - */ + /** SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. 
*/ @Test void test_sort_aggregation_push_down() { assertEqualsAfterOptimization( @@ -502,22 +413,19 @@ void test_sort_aggregation_push_down() { .aggregateBy("intV") .groupBy("stringV") .sortBy(SortOption.DEFAULT_DESC) - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test @@ -529,21 +437,17 @@ void test_limit_sort_filter_push_down() { withSortPushedDown( SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")), withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( limit( sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), 1, 1 - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), + 1, + 1), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } /* @@ -557,23 +461,16 @@ void only_one_project_should_be_push() { project( project( indexScanBuilder( - withProjectPushedDown( - DSL.ref("intV", INTEGER), - DSL.ref("stringV", STRING))), + withProjectPushedDown(DSL.ref("intV", INTEGER), DSL.ref("stringV", STRING))), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( project( relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } @Test @@ -586,21 +483,14 @@ void test_nested_sort_filter_push_down() { SortBuilders.fieldSort("message.info") .order(SortOrder.ASC) .setNestedSort(new NestedSortBuilder("message")))), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - sort( - filter( - relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of( - SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)) - ) - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + sort( + filter( + relation("schema", table), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)))), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test @@ -610,54 +500,30 @@ void test_function_expression_sort_returns_optimized_logical_sort() { sort( indexScanBuilder(), Pair.of( - SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message"))) - ) - ), + SortOption.DEFAULT_ASC, 
DSL.match(DSL.namedArgument("field", literal("message"))))), sort( relation("schema", table), Pair.of( SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message")) - ) - ) - ) - ); + DSL.match(DSL.namedArgument("field", literal("message")))))); } @Test void test_non_field_sort_returns_optimized_logical_sort() { // Invalid use case coverage OpenSearchIndexScanBuilder::sortByFieldsOnly returns false assertEqualsAfterOptimization( - sort( - indexScanBuilder(), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ), - sort( - relation("schema", table), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ) - ); + sort(indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field")))); } @Test void sort_with_expression_cannot_merge_with_relation() { assertEqualsAfterOptimization( sort( - indexScanBuilder(), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -669,20 +535,17 @@ void sort_with_expression_cannot_merge_with_aggregation() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -690,30 +553,21 @@ void aggregation_cant_merge_index_scan_with_limit() { assertEqualsAfterOptimization( project( aggregation( - indexScanBuilder( - withLimitPushedDown(10, 0)), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + indexScanBuilder(withLimitPushedDown(10, 0)), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( - limit( - relation("schema", table), - 10, 0), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + limit(relation("schema", table), 10, 0), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /** - * Can't Optimize the following query. - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY avg(intV). + * Can't Optimize the following query. SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY + * avg(intV). 
*/ @Test void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { @@ -725,52 +579,39 @@ void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test void project_literal_should_not_be_pushed_down() { assertEqualsAfterOptimization( - project( - indexScanBuilder(), - DSL.named("i", DSL.literal("str")) - ), - optimize( - project( - relation("schema", table), - DSL.named("i", DSL.literal("str")) - ) - ) - ); + project(indexScanBuilder(), DSL.named("i", DSL.literal("str"))), + optimize(project(relation("schema", table), DSL.named("i", DSL.literal("str"))))); } private OpenSearchIndexScanBuilder indexScanBuilder(Runnable... verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - return new OpenSearchIndexScanBuilder(new OpenSearchIndexScanQueryBuilder(requestBuilder), - requestBuilder -> indexScan); + return new OpenSearchIndexScanBuilder( + new OpenSearchIndexScanQueryBuilder(requestBuilder), requestBuilder -> indexScan); } private OpenSearchIndexScanBuilder indexScanAggBuilder(Runnable... verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - var aggregationBuilder = new OpenSearchIndexScanAggregationBuilder( - requestBuilder, mock(LogicalAggregation.class)); + var aggregationBuilder = + new OpenSearchIndexScanAggregationBuilder(requestBuilder, mock(LogicalAggregation.class)); return new OpenSearchIndexScanBuilder(aggregationBuilder, builder -> indexScan); } @@ -797,29 +638,32 @@ private Runnable withAggregationPushedDown( AggregationAssertHelper.AggregationAssertHelperBuilder aggregation) { // Assume single term bucket and AVG metric in all tests in this suite - CompositeAggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList( - new TermsValuesSourceBuilder(aggregation.groupBy) - .field(aggregation.groupBy) - .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") - .missingOrder(aggregation.sortBy.getNullOrder() == NULL_FIRST ? "first" : "last") - .missingBucket(true))) - .subAggregation( - AggregationBuilders.avg(aggregation.aggregateName) - .field(aggregation.aggregateBy)) - .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); + CompositeAggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", + Collections.singletonList( + new TermsValuesSourceBuilder(aggregation.groupBy) + .field(aggregation.groupBy) + .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") + .missingOrder( + aggregation.sortBy.getNullOrder() == NULL_FIRST ? 
"first" : "last") + .missingBucket(true))) + .subAggregation( + AggregationBuilders.avg(aggregation.aggregateName).field(aggregation.aggregateBy)) + .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); List aggBuilders = Collections.singletonList(aggBuilder); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser(aggregation.aggregateName)); + new CompositeAggregationParser(new SingleValueParser(aggregation.aggregateName)); return () -> { verify(requestBuilder, times(1)).pushDownAggregation(Pair.of(aggBuilders, responseParser)); - verify(requestBuilder, times(1)).pushTypeMapping(aggregation.resultTypes - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - e -> OpenSearchDataType.of(e.getValue())))); + verify(requestBuilder, times(1)) + .pushTypeMapping( + aggregation.resultTypes.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); }; } @@ -832,8 +676,8 @@ private Runnable withLimitPushedDown(int size, int offset) { } private Runnable withProjectPushedDown(ReferenceExpression... references) { - return () -> verify(requestBuilder, times(1)).pushDownProjects( - new HashSet<>(Arrays.asList(references))); + return () -> + verify(requestBuilder, times(1)).pushDownProjects(new HashSet<>(Arrays.asList(references))); } private Runnable withHighlightPushedDown(String field, Map arguments) { @@ -875,16 +719,18 @@ private static class AggregationAssertHelper { } private LogicalPlan optimize(LogicalPlan plan) { - LogicalPlanOptimizer optimizer = new LogicalPlanOptimizer(List.of( - new CreateTableScanBuilder(), - new PushDownPageSize(), - PUSH_DOWN_FILTER, - PUSH_DOWN_AGGREGATION, - PUSH_DOWN_SORT, - PUSH_DOWN_LIMIT, - PUSH_DOWN_HIGHLIGHT, - PUSH_DOWN_NESTED, - PUSH_DOWN_PROJECT)); + LogicalPlanOptimizer optimizer = + new LogicalPlanOptimizer( + List.of( + new CreateTableScanBuilder(), + new PushDownPageSize(), + PUSH_DOWN_FILTER, + PUSH_DOWN_AGGREGATION, + PUSH_DOWN_SORT, + PUSH_DOWN_LIMIT, + PUSH_DOWN_HIGHLIGHT, + PUSH_DOWN_NESTED, + PUSH_DOWN_PROJECT)); return optimizer.optimize(plan); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java index 67f0869d6e..2085519b12 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java @@ -44,34 +44,37 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class OpenSearchIndexScanPaginationTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final int MAX_RESULT_WINDOW = 3; public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(4); - @Mock - private Settings settings; + @Mock private Settings settings; @BeforeEach void setup() { lenient().when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(QUERY_SIZE); - lenient().when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) - .thenReturn(TimeValue.timeValueMinutes(1)); + lenient() + .when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + 
.thenReturn(TimeValue.timeValueMinutes(1)); } - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory - = new OpenSearchExprValueFactory(Map.of( - "name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), + "department", OpenSearchDataType.of(STRING))); @Test void query_empty_result() { mockResponse(client); var builder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (var indexScan = new OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -80,8 +83,11 @@ void query_empty_result() { @Test void explain_not_implemented() { - assertThrows(Throwable.class, () -> mock(OpenSearchIndexScan.class, - withSettings().defaultAnswer(CALLS_REAL_METHODS)).explain()); + assertThrows( + Throwable.class, + () -> + mock(OpenSearchIndexScan.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)) + .explain()); } @Test @@ -92,9 +98,11 @@ void dont_serialize_if_no_cursor() { OpenSearchResponse response = mock(); when(builder.build(any(), anyInt(), any())).thenReturn(request); when(client.search(any())).thenReturn(response); - try (var indexScan - = new OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); when(request.hasAnotherBatch()).thenReturn(false); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index 67749c4055..ac1e9038fb 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertAll; @@ -66,20 +65,19 @@ class OpenSearchIndexScanTest { public static final int QUERY_SIZE = 200; - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("employees"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("employees"); public static final int MAX_RESULT_WINDOW = 10000; public static final TimeValue CURSOR_KEEP_ALIVE = TimeValue.timeValueMinutes(1); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory = new OpenSearchExprValueFactory( - Map.of("name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), "department", OpenSearchDataType.of(STRING))); @BeforeEach - void setup() { - } + void setup() {} @Test void explain() { @@ -96,8 
+94,8 @@ void throws_no_cursor_exception() { var request = mock(OpenSearchRequest.class); when(request.hasAnotherBatch()).thenReturn(false); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request); - var byteStream = new ByteArrayOutputStream(); - var objectStream = new ObjectOutputStream(byteStream)) { + var byteStream = new ByteArrayOutputStream(); + var objectStream = new ObjectOutputStream(byteStream)) { assertThrows(NoCursorException.class, () -> objectStream.writeObject(indexScan)); } } @@ -112,8 +110,9 @@ void serialize() { var index = mock(OpenSearchIndex.class); when(engine.getClient()).thenReturn(client); when(engine.getTable(any(), any())).thenReturn(index); - var request = new OpenSearchScrollRequest( - INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); request.setScrollId("valid-id"); // make a response, so OpenSearchResponse::isEmpty would return true and unset needClean var response = mock(SearchResponse.class); @@ -121,7 +120,7 @@ void serialize() { var hits = mock(SearchHits.class); when(response.getHits()).thenReturn(hits); when(response.getScrollId()).thenReturn("valid-id"); - when(hits.getHits()).thenReturn(new SearchHit[]{ mock() }); + when(hits.getHits()).thenReturn(new SearchHit[] {mock()}); request.search(null, (req) -> response); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request)) { @@ -145,8 +144,9 @@ void query_empty_result() { mockResponse(client); final var name = new OpenSearchRequest.IndexName("test"); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -155,88 +155,84 @@ void query_empty_result() { @Test void query_all_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), employee(2, "Smith", "HR"), employee(3, "Allen", "IT") + }); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } - static final OpenSearchRequest.IndexName EMPLOYEES_INDEX - = new OpenSearchRequest.IndexName("employees"); + static final OpenSearchRequest.IndexName EMPLOYEES_INDEX = + new OpenSearchRequest.IndexName("employees"); @Test void 
query_all_results_with_scroll() { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new ExprValue[]{employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT")}); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @Test void query_some_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int limit = 3; OpenSearchRequestBuilder builder = new OpenSearchRequestBuilder(0, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -245,55 +241,56 @@ void query_some_results_with_query() { void query_some_results_with_scroll() { mockTwoPageResponse(client); final var requestuilder = new OpenSearchRequestBuilder(10, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } static void mockTwoPageResponse(OpenSearchClient client) { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", 
"HR")}, - new ExprValue[]{employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); } @Test void query_results_limited_by_query_size() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int defaultQuerySize = 2; final var requestBuilder = new OpenSearchRequestBuilder(defaultQuerySize, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - defaultQuerySize, requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, + defaultQuerySize, + requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -323,7 +320,8 @@ void push_down_highlight() { .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) .pushDownHighlight("Body", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), + .shouldQueryHighlight( + QueryBuilders.termQuery("name", "John"), new HighlightBuilder().field("Title").field("Body")); } @@ -332,14 +330,12 @@ void push_down_highlight_with_arguments() { Map args = new HashMap<>(); args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); - HighlightBuilder highlightBuilder = new HighlightBuilder() - .field("Title"); + HighlightBuilder highlightBuilder = new HighlightBuilder().field("Title"); highlightBuilder.fields().get(0).preTags("").postTags(""); assertThat() .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), - highlightBuilder); + .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), highlightBuilder); } private PushDownAssertion assertThat() { @@ -352,8 +348,7 @@ private static class PushDownAssertion { private final OpenSearchResponse response; private final OpenSearchExprValueFactory factory; - public PushDownAssertion(OpenSearchClient client, - OpenSearchExprValueFactory valueFactory) { + public PushDownAssertion(OpenSearchClient client, OpenSearchExprValueFactory valueFactory) { this.client = client; this.requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, valueFactory); @@ -373,35 +368,39 @@ PushDownAssertion pushDownHighlight(String query, Map arguments } PushDownAssertion shouldQueryHighlight(QueryBuilder query, HighlightBuilder highlight) { - var sourceBuilder = new SearchSourceBuilder() - .from(0) - .timeout(CURSOR_KEEP_ALIVE) - .query(query) - .size(QUERY_SIZE) - .highlighter(highlight) - .sort(DOC_FIELD_NAME, ASC); + var sourceBuilder = + new SearchSourceBuilder() + .from(0) + .timeout(CURSOR_KEEP_ALIVE) + .query(query) + .size(QUERY_SIZE) + .highlighter(highlight) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest 
request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, sourceBuilder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } PushDownAssertion shouldQuery(QueryBuilder expected) { - var builder = new SearchSourceBuilder() - .from(0) - .query(expected) - .size(QUERY_SIZE) - .timeout(CURSOR_KEEP_ALIVE) - .sort(DOC_FIELD_NAME, ASC); + var builder = + new SearchSourceBuilder() + .from(0) + .query(expected) + .size(QUERY_SIZE) + .timeout(CURSOR_KEEP_ALIVE) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, builder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java index 0b0568a6b7..5f233d7f45 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java @@ -1,6 +1,5 @@ package org.opensearch.sql.opensearch.storage.scan; - import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.mockito.Mockito.mock; @@ -21,12 +20,13 @@ class PushDownQueryBuilderTest { @Test void default_implementations() { - var sample = new PushDownQueryBuilder() { - @Override - public OpenSearchRequestBuilder build() { - return null; - } - }; + var sample = + new PushDownQueryBuilder() { + @Override + public OpenSearchRequestBuilder build() { + return null; + } + }; assertAll( () -> assertFalse(sample.pushDownFilter(mock(LogicalFilter.class))), () -> assertFalse(sample.pushDownProject(mock(LogicalProject.class))), @@ -34,9 +34,6 @@ public OpenSearchRequestBuilder build() { () -> assertFalse(sample.pushDownSort(mock(LogicalSort.class))), () -> assertFalse(sample.pushDownNested(mock(LogicalNested.class))), () -> assertFalse(sample.pushDownLimit(mock(LogicalLimit.class))), - () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class))) - - ); + () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class)))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java index 32c02959b8..781e27d71a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java @@ -36,76 +36,82 @@ class QueryStringTest { private final QueryStringQuery queryStringQuery = new QueryStringQuery(); private final FunctionName queryStringFunc = FunctionName.of("query_string"); - private static final 
LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { Expression field = DSL.namedArgument("fields", fields_value); Expression query = DSL.namedArgument("query", query_value); return Stream.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).map(arg -> List.of(field, query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + 
DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .map(arg -> List.of(field, query, arg)); } @ParameterizedTest @MethodSource("generateValidData") void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryStringQuery.build( - new QueryStringExpression(validArgs))); + Assertions.assertNotNull(queryStringQuery.build(new QueryStringExpression(validArgs))); } @Test void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @@ -124,14 +130,16 @@ public QueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java index a61b47b7b1..d81218c0c3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java @@ -37,78 +37,78 @@ class QueryTest { static Stream> generateValidData() { 
Expression query = DSL.namedArgument("query", query_value); return List.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).stream().map(arg -> List.of(query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + 
DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .stream() + .map(arg -> List.of(query, arg)); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryQuery.build( - new QueryExpression(validArgs))); + Assertions.assertNotNull(queryQuery.build(new QueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + assertThrows( + SyntaxCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_field_argument() { - List arguments = List.of( - namedArgument("fields", "invalid argument"), - namedArgument("query", query_value)); - assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + List.of(namedArgument("fields", "invalid argument"), namedArgument("query", query_value)); + assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + List.of( + namedArgument("query", query_value), namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_sending_parameter_multiple_times() { - List arguments = List.of( + List arguments = + List.of( namedArgument("query", query_value), namedArgument("allow_leading_wildcard", DSL.literal("true")), namedArgument("allow_leading_wildcard", DSL.literal("true"))); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } private NamedArgumentExpression namedArgument(String name, String value) { @@ -126,14 +126,16 @@ public QueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } @@ -141,7 +143,6 @@ public ExprType type() { public void test_can_get_query_name() { List arguments = List.of(namedArgument("query", query_value)); queryQuery.build(new QueryExpression(arguments)); - Assertions.assertEquals("query", - queryQuery.getQueryName()); + Assertions.assertEquals("query", 
queryQuery.getQueryName()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java index 208c782593..ca87f42900 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -21,9 +20,10 @@ class RangeQueryTest { @Test void should_throw_exception_for_unsupported_comparison() { // Note that since we do switch check on enum comparison, this should be impossible - assertThrows(IllegalStateException.class, () -> - new RangeQuery(Comparison.BETWEEN) - .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); + assertThrows( + IllegalStateException.class, + () -> + new RangeQuery(Comparison.BETWEEN) + .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java index f7129117a1..ea14461521 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -37,148 +36,129 @@ class SimpleQueryStringTest { private final SimpleQueryStringQuery simpleQueryStringQuery = new SimpleQueryStringQuery(); private final FunctionName simpleQueryString = FunctionName.of("simple_query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { return Stream.of( - List.of( - DSL.namedArgument("fields", fields_value), - DSL.namedArgument("query", query_value) - ), + List.of(DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value)), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("analyze_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyzer", DSL.literal("standard")) - ), + DSL.namedArgument("analyzer", DSL.literal("standard"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + 
DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("not|and")) - ), + DSL.namedArgument("flags", DSL.literal("not|and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("lenient", DSL.literal("true")) - ), + DSL.namedArgument("lenient", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("AND")) - ), + DSL.namedArgument("default_operator", DSL.literal("AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("and")) - ), + DSL.namedArgument("default_operator", DSL.literal("and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("minimum_should_match", DSL.literal("4")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")) - ), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("boost", DSL.literal("1")) - ), - List.of( - DSL.namedArgument("FIELDS", fields_value), - DSL.namedArgument("QUERY", query_value) - ), + DSL.namedArgument("boost", DSL.literal("1"))), + List.of(DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("QUERY", query_value)), List.of( DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true"))), List.of( 
DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyZER", DSL.literal("standard")) - ) - ); + DSL.namedArgument("analyZER", DSL.literal("standard")))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(simpleQueryStringQuery.build( - new SimpleQueryStringExpression(validArgs))); + Assertions.assertNotNull( + simpleQueryStringQuery.build(new SimpleQueryStringExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @@ -197,14 +177,16 @@ public SimpleQueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java index 98bd7c5784..7182626c02 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java @@ -36,46 +36,45 @@ class WildcardQueryTest { static Stream> generateValidData() { return Stream.of( List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), namedArgument("query", "query_value*"), namedArgument("boost", "0.7"), namedArgument("case_insensitive", "false"), - namedArgument("rewrite", "constant_score_boolean") - ) - ); + namedArgument("rewrite", "constant_score_boolean"))); } @ParameterizedTest @MethodSource("generateValidData") public void 
test_valid_parameters(List validArgs) { - Assertions.assertNotNull(wildcardQueryQuery.build( - new WildcardQueryExpression(validArgs))); + Assertions.assertNotNull(wildcardQueryQuery.build(new WildcardQueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of(namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), - namedArgument("query", "query_value*"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("query", "query_value*"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @@ -86,14 +85,16 @@ public WildcardQueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java index 5406f4cb58..a93a1e5fa4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java @@ -48,12 +48,17 @@ class RelevanceQueryBuildTest { private QueryBuilder queryBuilder; private final Map> queryBuildActions = ImmutableMap.>builder() - .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))).build(); + .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))) + .build(); @BeforeEach public void setUp() { - query = mock(RelevanceQuery.class, withSettings().useConstructor(queryBuildActions) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + RelevanceQuery.class, + withSettings() + .useConstructor(queryBuildActions) + 
.defaultAnswer(Mockito.CALLS_REAL_METHODS)); queryBuilder = mock(QueryBuilder.class); when(query.createQueryBuilder(any())).thenReturn(queryBuilder); String queryName = "mock_query"; @@ -64,9 +69,13 @@ public void setUp() { @Test void throws_SemanticCheckException_when_same_argument_twice() { - FunctionExpression expr = createCall(List.of(FIELD_ARG, QUERY_ARG, - namedArgument("boost", "2.3"), - namedArgument("boost", "2.4"))); + FunctionExpression expr = + createCall( + List.of( + FIELD_ARG, + QUERY_ARG, + namedArgument("boost", "2.3"), + namedArgument("boost", "2.4"))); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); assertEquals("Parameter 'boost' can only be specified once.", exception.getMessage()); @@ -79,8 +88,7 @@ void throws_SemanticCheckException_when_wrong_argument_name() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); - assertEquals("Parameter wrongarg is invalid for mock_query function.", - exception.getMessage()); + assertEquals("Parameter wrongarg is invalid for mock_query function.", exception.getMessage()); } @Test @@ -95,14 +103,13 @@ void calls_action_when_correct_argument_name() { @ParameterizedTest @MethodSource("insufficientArguments") public void throws_SyntaxCheckException_when_no_required_arguments(List arguments) { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> query.build(createCall(arguments))); + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, () -> query.build(createCall(arguments))); assertEquals("mock_query requires at least two parameters", exception.getMessage()); } public static Stream> insufficientArguments() { - return Stream.of(List.of(), - List.of(namedArgument("field", "field_A"))); + return Stream.of(List.of(), List.of(namedArgument("field", "field_A"))); } private static NamedArgumentExpression namedArgument(String field, String fieldValue) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java index 3628dc8abc..7234ee9275 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java @@ -26,14 +26,17 @@ class SingleFieldQueryTest { SingleFieldQuery query; private final String testQueryName = "test_query"; - private final Map actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach void setUp() { - query = mock(SingleFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + SingleFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); when(query.getQueryName()).thenReturn(testQueryName); } @@ -42,15 +45,20 @@ void createQueryBuilderTestTypeTextKeyword() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, - OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), - DSL.namedArgument("query", - new 
LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", + new ReferenceExpression( + sampleField, + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } @Test @@ -58,12 +66,13 @@ void createQueryBuilderTestTypeText() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, OpenSearchTextType.of())), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", new ReferenceExpression(sampleField, OpenSearchTextType.of())), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java index e84ed14e43..89a10ad563 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.hamcrest.MatcherAssert.assertThat; @@ -32,10 +31,7 @@ void build_sortbuilder_from_reference() { void build_sortbuilder_from_nested_function() { assertNotNull( sortQueryBuilder.build( - DSL.nested(DSL.ref("message.info", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + DSL.nested(DSL.ref("message.info", STRING)), Sort.SortOption.DEFAULT_ASC)); } @Test @@ -43,63 +39,56 @@ void build_sortbuilder_from_nested_function_with_path_param() { assertNotNull( sortQueryBuilder.build( DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_many_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING), - DSL.ref("message", STRING) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> + sortQueryBuilder.build( + DSL.nested( + DSL.ref("message.info", STRING), + DSL.ref("message", STRING), + DSL.ref("message", STRING)), + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_few_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested(), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(), Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_invalid_arg_type_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.literal(1) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(DSL.literal(1)), Sort.SortOption.DEFAULT_ASC)); } @Test void 
build_sortbuilder_from_expression_should_throw_exception() {
     final IllegalStateException exception =
-        assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build(
-            new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC));
+        assertThrows(
+            IllegalStateException.class,
+            () ->
+                sortQueryBuilder.build(
+                    new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC));
     assertThat(exception.getMessage(), Matchers.containsString("unsupported expression"));
   }

   @Test
   void build_sortbuilder_from_function_should_throw_exception() {
     final IllegalStateException exception =
-        assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build(DSL.equal(DSL.ref(
-            "intV", INTEGER), DSL.literal(1)), Sort.SortOption.DEFAULT_ASC));
+        assertThrows(
+            IllegalStateException.class,
+            () ->
+                sortQueryBuilder.build(
+                    DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(1)),
+                    Sort.SortOption.DEFAULT_ASC));
     assertThat(exception.getMessage(), Matchers.containsString("unsupported expression"));
   }
 }
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java
index 494f3ff2d0..00d1c9ecd1 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-
 package org.opensearch.sql.opensearch.storage.system;

 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -21,8 +20,7 @@
 @ExtendWith(MockitoExtension.class)
 class OpenSearchSystemIndexScanTest {

-  @Mock
-  private OpenSearchSystemRequest request;
+  @Mock private OpenSearchSystemRequest request;

   @Test
   public void queryData() {
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java
index a483f2dad8..1afcfcdc86 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-
 package org.opensearch.sql.opensearch.storage.system;

 import static org.hamcrest.MatcherAssert.assertThat;
@@ -35,29 +34,23 @@
 @ExtendWith(MockitoExtension.class)
 class OpenSearchSystemIndexTest {

-  @Mock
-  private OpenSearchClient client;
+  @Mock private OpenSearchClient client;

-  @Mock
-  private Table table;
+  @Mock private Table table;

   @Test
   void testGetFieldTypesOfMetaTable() {
     OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO);
     final Map<String, ExprType> fieldTypes = systemIndex.getFieldTypes();
-    assertThat(fieldTypes, anyOf(
-        hasEntry("TABLE_CAT", STRING)
-    ));
+    assertThat(fieldTypes, anyOf(hasEntry("TABLE_CAT", STRING)));
   }

   @Test
   void testGetFieldTypesOfMappingTable() {
-    OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, mappingTable(
-        "test_index"));
+    OpenSearchSystemIndex systemIndex =
+        new OpenSearchSystemIndex(client, mappingTable("test_index"));
     final Map<String, ExprType> fieldTypes = systemIndex.getFieldTypes();
-    assertThat(fieldTypes, anyOf(
-        hasEntry("COLUMN_NAME", STRING)
-    ));
+    assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING)));
   }

   @Test
@@ -69,8 +62,7 @@ void testIsExist() {
   @Test
   void testCreateTable() {
     Table systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO);
-    assertThrows(UnsupportedOperationException.class,
-        () -> systemIndex.create(ImmutableMap.of()));
+    assertThrows(UnsupportedOperationException.class, () -> systemIndex.create(ImmutableMap.of()));
   }

   @Test
@@ -78,11 +70,8 @@ void implement() {
     OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO);
     NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING));

-    final PhysicalPlan plan = systemIndex.implement(
-        project(
-            relation(TABLE_INFO, table),
-            projectExpr
-        ));
+    final PhysicalPlan plan =
+        systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr));
     assertTrue(plan instanceof ProjectOperator);
     assertTrue(plan.getChild().get(0) instanceof OpenSearchSystemIndexScan);
   }
diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java
index 85b8889de3..0db87f89d4 100644
--- a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-
 package org.opensearch.sql.opensearch.utils;

 import com.google.common.collect.ImmutableSet;
@@ -36,15 +35,13 @@
   public static List<NamedExpression> group(NamedExpression... exprs) {
     return Arrays.asList(exprs);
   }

-  public static List<Pair<Sort.SortOption, Expression>> sort(Expression expr1,
-                                                             Sort.SortOption option1) {
+  public static List<Pair<Sort.SortOption, Expression>> sort(
+      Expression expr1, Sort.SortOption option1) {
     return Collections.singletonList(Pair.of(option1, expr1));
   }

-  public static List<Pair<Sort.SortOption, Expression>> sort(Expression expr1,
-                                                             Sort.SortOption option1,
-                                                             Expression expr2,
-                                                             Sort.SortOption option2) {
+  public static List<Pair<Sort.SortOption, Expression>> sort(
+      Expression expr1, Sort.SortOption option1, Expression expr2, Sort.SortOption option2) {
     return Arrays.asList(Pair.of(option1, expr1), Pair.of(option2, expr2));
   }