Support Alternate Datetime Formats (opensearch-project#1664)
* Support Alternate Datetime Formats (#268)

* Add OpenSearchDateType as a datatype for matching with Date/Time OpenSearch types

Signed-off-by: Andrew Carbonetto <[email protected]>

---------

Signed-off-by: Andrew Carbonetto <[email protected]>
Signed-off-by: GabeFernandez310 <[email protected]>
Signed-off-by: Guian Gumpac <[email protected]>
Signed-off-by: MaxKsyunz <[email protected]>
Co-authored-by: Andrew Carbonetto <[email protected]>
Co-authored-by: GabeFernandez310 <[email protected]>
Co-authored-by: MaxKsyunz <[email protected]>
Signed-off-by: Mitchell Gale <[email protected]>
4 people authored and MitchellGale committed Jun 12, 2023
1 parent 401cd9b commit 0e6a648
Showing 20 changed files with 1,536 additions and 272 deletions.
8 changes: 6 additions & 2 deletions docs/user/general/datatypes.rst
@@ -91,7 +91,7 @@ The table below list the mapping between OpenSearch Data Type, OpenSearch SQL Da
+-----------------+---------------------+-----------+
| text | text | VARCHAR |
+-----------------+---------------------+-----------+
| date | timestamp | TIMESTAMP |
| date* | timestamp | TIMESTAMP |
+-----------------+---------------------+-----------+
| date_nanos | timestamp | TIMESTAMP |
+-----------------+---------------------+-----------+
@@ -104,7 +104,11 @@ The table below list the mapping between OpenSearch Data Type, OpenSearch SQL Da
| nested | array | STRUCT |
+-----------------+---------------------+-----------+

Notes: Not all the OpenSearch SQL Type has correspond OpenSearch Type. e.g. data and time. To use function which required such data type, user should explicitly convert the data type.
Notes:
* Not every OpenSearch SQL type has a corresponding OpenSearch type, e.g. date and time. To use a function that requires such a data type, the user should convert the data type explicitly.
* date*: maps to `timestamp` by default. Depending on the "format" property, `date` can instead map to `date` or `time`. See the list of supported named formats `here <https://opensearch.org/docs/latest/field-types/supported-field-types/date/>`_.
  For example, `basic_date` maps to the `date` type, and `basic_time` maps to the `time` type.
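
For illustration, here is a minimal mapping sketch (the index and field names are hypothetical, not part of this change) together with the SQL types it would produce::

    PUT my_index
    {
      "mappings": {
        "properties": {
          "release_date": { "type": "date", "format": "basic_date" },
          "start_time":   { "type": "date", "format": "basic_time" },
          "updated_at":   { "type": "date" }
        }
      }
    }

Here `release_date` would be read as the SQL `date` type, `start_time` as `time`, and `updated_at`, which declares no "format", keeps the default `timestamp` mapping.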


Data Type Conversion
====================
@@ -651,6 +651,10 @@ public enum Index {
"calcs",
getMappingFile("calcs_index_mappings.json"),
"src/test/resources/calcs.json"),
DATE_FORMATS(TestsConstants.TEST_INDEX_DATE_FORMATS,
"date_formats",
getMappingFile("date_formats_index_mapping.json"),
"src/test/resources/date_formats.json"),
WILDCARD(TestsConstants.TEST_INDEX_WILDCARD,
"wildcard",
getMappingFile("wildcard_index_mappings.json"),
@@ -56,6 +56,7 @@ public class TestsConstants {
public final static String TEST_INDEX_BEER = TEST_INDEX + "_beer";
public final static String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing";
public final static String TEST_INDEX_CALCS = TEST_INDEX + "_calcs";
public final static String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats";
public final static String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard";
public final static String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested";
public final static String TEST_INDEX_NESTED_WITH_NULLS = TEST_INDEX + "_nested_with_nulls";
44 changes: 22 additions & 22 deletions integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java
@@ -383,17 +383,17 @@ public void testAvgDoubleInMemory() throws IOException {

@Test
public void testMaxDatePushedDown() throws IOException {
var response = executeQuery(String.format("SELECT max(CAST(date0 AS date))"
var response = executeQuery(String.format("SELECT max(date0)"
+ " from %s", TEST_INDEX_CALCS));
verifySchema(response, schema("max(CAST(date0 AS date))", null, "date"));
verifySchema(response, schema("max(date0)", null, "date"));
verifyDataRows(response, rows("2004-06-19"));
}

@Test
public void testAvgDatePushedDown() throws IOException {
var response = executeQuery(String.format("SELECT avg(CAST(date0 AS date))"
var response = executeQuery(String.format("SELECT avg(date0)"
+ " from %s", TEST_INDEX_CALCS));
verifySchema(response, schema("avg(CAST(date0 AS date))", null, "date"));
verifySchema(response, schema("avg(date0)", null, "date"));
verifyDataRows(response, rows("1992-04-23"));
}

@@ -423,25 +423,25 @@ public void testAvgDateTimePushedDown() throws IOException {

@Test
public void testMinTimePushedDown() throws IOException {
var response = executeQuery(String.format("SELECT min(CAST(time1 AS time))"
var response = executeQuery(String.format("SELECT min(time1)"
+ " from %s", TEST_INDEX_CALCS));
verifySchema(response, schema("min(CAST(time1 AS time))", null, "time"));
verifySchema(response, schema("min(time1)", null, "time"));
verifyDataRows(response, rows("00:05:57"));
}

@Test
public void testMaxTimePushedDown() throws IOException {
var response = executeQuery(String.format("SELECT max(CAST(time1 AS time))"
var response = executeQuery(String.format("SELECT max(time1)"
+ " from %s", TEST_INDEX_CALCS));
verifySchema(response, schema("max(CAST(time1 AS time))", null, "time"));
verifySchema(response, schema("max(time1)", null, "time"));
verifyDataRows(response, rows("22:50:16"));
}

@Test
public void testAvgTimePushedDown() throws IOException {
var response = executeQuery(String.format("SELECT avg(CAST(time1 AS time))"
var response = executeQuery(String.format("SELECT avg(time1)"
+ " from %s", TEST_INDEX_CALCS));
verifySchema(response, schema("avg(CAST(time1 AS time))", null, "time"));
verifySchema(response, schema("avg(time1)", null, "time"));
verifyDataRows(response, rows("13:06:36.25"));
}

@@ -471,28 +471,28 @@ public void testAvgTimeStampPushedDown() throws IOException {

@Test
public void testMinDateInMemory() throws IOException {
var response = executeQuery(String.format("SELECT min(CAST(date0 AS date))"
var response = executeQuery(String.format("SELECT min(date0)"
+ " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS));
verifySchema(response,
schema("min(CAST(date0 AS date)) OVER(PARTITION BY datetime1)", null, "date"));
schema("min(date0) OVER(PARTITION BY datetime1)", null, "date"));
verifySome(response.getJSONArray("datarows"), rows("1972-07-04"));
}

@Test
public void testMaxDateInMemory() throws IOException {
var response = executeQuery(String.format("SELECT max(CAST(date0 AS date))"
var response = executeQuery(String.format("SELECT max(date0)"
+ " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS));
verifySchema(response,
schema("max(CAST(date0 AS date)) OVER(PARTITION BY datetime1)", null, "date"));
schema("max(date0) OVER(PARTITION BY datetime1)", null, "date"));
verifySome(response.getJSONArray("datarows"), rows("2004-06-19"));
}

@Test
public void testAvgDateInMemory() throws IOException {
var response = executeQuery(String.format("SELECT avg(CAST(date0 AS date))"
var response = executeQuery(String.format("SELECT avg(date0)"
+ " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS));
verifySchema(response,
schema("avg(CAST(date0 AS date)) OVER(PARTITION BY datetime1)", null, "date"));
schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date"));
verifySome(response.getJSONArray("datarows"), rows("1992-04-23"));
}

@@ -525,28 +525,28 @@ public void testAvgDateTimeInMemory() throws IOException {

@Test
public void testMinTimeInMemory() throws IOException {
var response = executeQuery(String.format("SELECT min(CAST(time1 AS time))"
var response = executeQuery(String.format("SELECT min(time1)"
+ " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS));
verifySchema(response,
schema("min(CAST(time1 AS time)) OVER(PARTITION BY datetime1)", null, "time"));
schema("min(time1) OVER(PARTITION BY datetime1)", null, "time"));
verifySome(response.getJSONArray("datarows"), rows("00:05:57"));
}

@Test
public void testMaxTimeInMemory() throws IOException {
var response = executeQuery(String.format("SELECT max(CAST(time1 AS time))"
var response = executeQuery(String.format("SELECT max(time1)"
+ " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS));
verifySchema(response,
schema("max(CAST(time1 AS time)) OVER(PARTITION BY datetime1)", null, "time"));
schema("max(time1) OVER(PARTITION BY datetime1)", null, "time"));
verifySome(response.getJSONArray("datarows"), rows("22:50:16"));
}

@Test
public void testAvgTimeInMemory() throws IOException {
var response = executeQuery(String.format("SELECT avg(CAST(time1 AS time))"
var response = executeQuery(String.format("SELECT avg(time1)"
+ " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS));
verifySchema(response,
schema("avg(CAST(time1 AS time)) OVER(PARTITION BY datetime1)", null, "time"));
schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time"));
verifySome(response.getJSONArray("datarows"), rows("13:06:36.25"));
}

New file: DateTimeFormatsIT.java
@@ -0,0 +1,70 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/


package org.opensearch.sql.sql;

import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS;
import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT;
import static org.opensearch.sql.util.MatcherUtils.rows;
import static org.opensearch.sql.util.MatcherUtils.schema;
import static org.opensearch.sql.util.MatcherUtils.verifyDataRows;
import static org.opensearch.sql.util.MatcherUtils.verifySchema;
import static org.opensearch.sql.util.TestUtils.getResponseBody;

import java.io.IOException;
import java.util.Locale;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.sql.legacy.SQLIntegTestCase;

public class DateTimeFormatsIT extends SQLIntegTestCase {

@Override
public void init() throws Exception {
super.init();
loadIndex(Index.DATE_FORMATS);
}

@Test
public void testReadingDateFormats() throws IOException {
String query = String.format("SELECT weekyear_week_day, hour_minute_second_millis," +
" strict_ordinal_date_time FROM %s LIMIT 1", TEST_INDEX_DATE_FORMATS);
JSONObject result = executeQuery(query);
verifySchema(result,
schema("weekyear_week_day", null, "date"),
schema("hour_minute_second_millis", null, "time"),
schema("strict_ordinal_date_time", null, "timestamp"));
verifyDataRows(result,
rows("1984-04-12",
"09:07:42",
"1984-04-12 09:07:42.000123456"
));
}

@Test
public void testDateFormatsWithOr() throws IOException {
String query = String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS);
JSONObject result = executeQuery(query);
verifyDataRows(result,
rows("1984-04-12 00:00:00"),
rows("1984-04-12 09:07:42.000123456"));
}

protected JSONObject executeQuery(String query) throws IOException {
Request request = new Request("POST", QUERY_API_ENDPOINT);
request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query));

RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder();
restOptionsBuilder.addHeader("Content-Type", "application/json");
request.setOptions(restOptionsBuilder);

Response response = client().performRequest(request);
return new JSONObject(getResponseBody(response));
}
}
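
The new mapping file referenced above, date_formats_index_mapping.json, is not included in this excerpt. A plausible sketch of it, assuming each field simply uses the OpenSearch named format its name suggests (every definition below is an assumption, not the committed file):

    {
      "mappings": {
        "properties": {
          "weekyear_week_day":          { "type": "date", "format": "weekyear_week_day" },
          "hour_minute_second_millis":  { "type": "date", "format": "hour_minute_second_millis" },
          "strict_ordinal_date_time":   { "type": "date", "format": "strict_ordinal_date_time" },
          "yyyy-MM-dd_OR_epoch_millis": { "type": "date", "format": "yyyy-MM-dd||epoch_millis" }
        }
      }
    }

With such a mapping, the first three fields would surface in SQL as date, time, and timestamp respectively, matching the schema assertions in testReadingDateFormats.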