diff --git a/docs/reference/sql/functions/geo.asciidoc b/docs/reference/sql/functions/geo.asciidoc
new file mode 100644
index 0000000000000..f5ed716eaeb29
--- /dev/null
+++ b/docs/reference/sql/functions/geo.asciidoc
@@ -0,0 +1,192 @@
+[role="xpack"]
+[testenv="basic"]
+[[sql-functions-geo]]
+=== Geo Functions
+
+The geo functions work with geometries stored in `geo_point` and `geo_shape` fields, or returned by other geo functions.
+
+==== Limitations
+
+Both `geo_point` and `geo_shape` types are represented in SQL as `geometry` and can be used
+interchangeably, with the following exceptions:
+
+* `geo_shape` fields don't have doc values; therefore, these fields cannot be used for filtering, grouping or sorting.
+
+* `geo_point` fields are indexed and have doc values by default; however, only latitude and longitude are stored and
+  indexed with some loss of precision from the original values (4.190951585769653E-8 for the latitude and
+  8.381903171539307E-8 for the longitude). The altitude component is accepted but not stored in doc values or indexed.
+  Therefore, calling the `ST_Z` function when filtering, grouping or sorting will return `null`.
+
+==== Geometry Conversion
+
+[[sql-functions-geo-st-as-wkt]]
+===== `ST_AsWKT`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_AsWKT(geometry<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> geometry
+
+*Output*: string
+
+.Description:
+
+Returns the WKT representation of the `geometry`.
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[aswkt]
+--------------------------------------------------
+
+
+[[sql-functions-geo-st-wkt-to-sql]]
+===== `ST_WKTToSQL`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_WKTToSQL(string<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> string WKT representation of geometry
+
+*Output*: geometry
+
+.Description:
+
+Returns the geometry from its WKT representation.
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[wkttosql]
+--------------------------------------------------
+
+==== Geometry Properties
+
+[[sql-functions-geo-st-geometrytype]]
+===== `ST_GeometryType`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_GeometryType(geometry<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> geometry
+
+*Output*: string
+
+.Description:
+
+Returns the type of the `geometry`, such as POINT, MULTIPOINT, LINESTRING, MULTILINESTRING, POLYGON, MULTIPOLYGON, GEOMETRYCOLLECTION, ENVELOPE or CIRCLE.
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[geometrytype]
+--------------------------------------------------
+
+[[sql-functions-geo-st-x]]
+===== `ST_X`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_X(geometry<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> geometry
+
+*Output*: double
+
+.Description:
+
+Returns the longitude of the first point in the geometry.
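+
+NOTE: `ST_X` and `ST_Y` follow the WKT coordinate order, so `ST_X` reads the first (longitude) coordinate
+of a point and `ST_Y` the second (latitude). A minimal sketch, using an arbitrary made-up point:
+
+[source, sql]
+--------------------------------------------------
+-- the first coordinate (10) is the longitude, the second (20) the latitude
+SELECT ST_X(ST_WKTToSQL('POINT (10 20)')) AS x, ST_Y(ST_WKTToSQL('POINT (10 20)')) AS y;
+--------------------------------------------------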
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[x]
+--------------------------------------------------
+
+[[sql-functions-geo-st-y]]
+===== `ST_Y`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_Y(geometry<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> geometry
+
+*Output*: double
+
+.Description:
+
+Returns the latitude of the first point in the geometry.
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[y]
+--------------------------------------------------
+
+[[sql-functions-geo-st-z]]
+===== `ST_Z`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_Z(geometry<1>)
+--------------------------------------------------
+
+*Input*:
+
+<1> geometry
+
+*Output*: double
+
+.Description:
+
+Returns the altitude of the first point in the geometry.
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[z]
+--------------------------------------------------
+
+[[sql-functions-geo-st-distance]]
+===== `ST_Distance`
+
+.Synopsis:
+[source, sql]
+--------------------------------------------------
+ST_Distance(geometry<1>, geometry<2>)
+--------------------------------------------------
+
+*Input*:
+
+<1> source geometry
+<2> target geometry
+
+*Output*: double
+
+.Description:
+
+Returns the distance between geometries in meters. Both geometries have to be points.
+
+["source","sql",subs="attributes,macros"]
+--------------------------------------------------
+include-tagged::{sql-specs}/docs/geo.csv-spec[distance]
+--------------------------------------------------
\ No newline at end of file
diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc
index 382adeecea4ed..248c47452bab4 100644
--- a/docs/reference/sql/functions/index.asciidoc
+++ b/docs/reference/sql/functions/index.asciidoc
@@ -136,6 +136,14 @@
 ** <>
 ** <>
 ** <>
+* <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
 * <>
 ** <>
 ** <>
@@ -149,5 +157,6 @@ include::search.asciidoc[]
 include::math.asciidoc[]
 include::string.asciidoc[]
 include::type-conversion.asciidoc[]
+include::geo.asciidoc[]
 include::conditional.asciidoc[]
 include::system.asciidoc[]
diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc
index 8db4c88f3a11b..ad9b2a320c0c6 100644
--- a/docs/reference/sql/language/data-types.asciidoc
+++ b/docs/reference/sql/language/data-types.asciidoc
@@ -81,6 +81,8 @@ s|SQL precision
 | interval_hour_to_minute | 23
 | interval_hour_to_second | 23
 | interval_minute_to_second | 23
+| geo_point | 52
+| geo_shape | 2,147,483,647
 |===
diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc
index b9c59e31b3d6f..c5b334480c993 100644
--- a/docs/reference/sql/limitations.asciidoc
+++ b/docs/reference/sql/limitations.asciidoc
@@ -150,3 +150,14 @@ SELECT count(*) FROM test GROUP BY MINUTE((CAST(date_created AS TIME));
 -------------------------------------------------------------
 SELECT HISTOGRAM(CAST(birth_date AS TIME), INTERVAL '10' MINUTES) as h, COUNT(*) FROM t GROUP BY h
 -------------------------------------------------------------
+
+[float]
+[[geo-sql-limitations]]
+=== Geo-related functions
+
+Since `geo_shape` fields don't have doc values, these fields cannot be used for filtering, grouping or sorting.
+
+By default, `geo_point` fields are indexed and have doc values. However, only latitude and longitude are stored and
+indexed with some loss of precision from the original values (4.190951585769653E-8 for the latitude and
+8.381903171539307E-8 for the longitude). The altitude component is accepted but not stored in doc values or indexed.
+Therefore, calling the `ST_Z` function when filtering, grouping or sorting will return `null`.
diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java
index 21d1bd9f25564..9299edc459cb7 100644
--- a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java
+++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java
@@ -20,12 +20,18 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper;
 
 import java.io.IOException;
+import java.io.InputStream;
 
 /**
  * first point of entry for a shape parser
@@ -67,4 +73,20 @@ static ShapeBuilder parse(XContentParser parser, BaseGeoShapeFieldMapper shapeMa
     static ShapeBuilder parse(XContentParser parser) throws IOException {
         return parse(parser, null);
     }
+
+    static ShapeBuilder parse(Object value) throws IOException {
+        XContentBuilder content = JsonXContent.contentBuilder();
+        content.startObject();
+        content.field("value", value);
+        content.endObject();
+
+        try (InputStream stream = BytesReference.bytes(content).streamInput();
+             XContentParser parser = JsonXContent.jsonXContent.createParser(
+                 NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
+            parser.nextToken(); // start object
+            parser.nextToken(); // field name
+            parser.nextToken(); // field value
+            return parse(parser);
+        }
+    }
 }
diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle
index 14d80ab50ee3f..1d13df3b2c32e 100644
--- a/x-pack/plugin/sql/build.gradle
+++ b/x-pack/plugin/sql/build.gradle
@@ -16,6 +16,7 @@ ext {
   // SQL test dependency versions
   csvjdbcVersion="1.0.34"
   h2Version="1.4.197"
+  h2gisVersion="1.5.0"
 }
 
 configurations {
diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle
index 9a15bcf29c0a1..37e0baf00aa71 100644
--- a/x-pack/plugin/sql/jdbc/build.gradle
+++ b/x-pack/plugin/sql/jdbc/build.gradle
@@ -21,6 +21,9 @@ dependencies {
   compile (project(':libs:x-content')) {
     transitive = false
   }
+  compile (project(':libs:elasticsearch-geo')) {
+    transitive = false
+  }
   compile project(':libs:core')
   runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
   testCompile "org.elasticsearch.test:framework:${version}"
diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java
index 52aff352ac182..51a03dad70b55 100644
---
a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java @@ -44,7 +44,9 @@ public enum EsType implements SQLType { INTERVAL_DAY_TO_SECOND( ExtraTypes.INTERVAL_DAY_SECOND), INTERVAL_HOUR_TO_MINUTE( ExtraTypes.INTERVAL_HOUR_MINUTE), INTERVAL_HOUR_TO_SECOND( ExtraTypes.INTERVAL_HOUR_SECOND), - INTERVAL_MINUTE_TO_SECOND(ExtraTypes.INTERVAL_MINUTE_SECOND); + INTERVAL_MINUTE_TO_SECOND(ExtraTypes.INTERVAL_MINUTE_SECOND), + GEO_POINT( ExtraTypes.GEOMETRY), + GEO_SHAPE( ExtraTypes.GEOMETRY); private final Integer type; diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java index 3df70f8e1d956..b8f09ece2f3be 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java @@ -29,5 +29,6 @@ private ExtraTypes() {} static final int INTERVAL_HOUR_MINUTE = 111; static final int INTERVAL_HOUR_SECOND = 112; static final int INTERVAL_MINUTE_SECOND = 113; + static final int GEOMETRY = 114; } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java index 9b1ff87596798..5f2f0773ff17a 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java @@ -3,6 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + package org.elasticsearch.xpack.sql.jdbc; import java.util.Objects; @@ -89,4 +90,4 @@ public boolean equals(Object obj) { public int hashCode() { return Objects.hash(name, type, table, catalog, schema, label, displaySize); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java index 1c216d8dba7c7..c9480dbcb1c2b 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java @@ -35,7 +35,7 @@ / Additional properties can be specified either through the Properties object or in the URL. In case of duplicates, the URL wins. */ //TODO: beef this up for Security/SSL -class JdbcConfiguration extends ConnectionConfiguration { +public class JdbcConfiguration extends ConnectionConfiguration { static final String URL_PREFIX = "jdbc:es://"; public static URI DEFAULT_URI = URI.create("http://localhost:9200/"); @@ -47,7 +47,7 @@ class JdbcConfiguration extends ConnectionConfiguration { // can be out/err/url static final String DEBUG_OUTPUT_DEFAULT = "err"; - static final String TIME_ZONE = "timezone"; + public static final String TIME_ZONE = "timezone"; // follow the JDBC spec and use the JVM default... 
// to avoid inconsistency, the default is picked up once at startup and reused across connections // to cater to the principle of least surprise diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java index 041c457d91b3d..39d942362d731 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java @@ -190,7 +190,7 @@ public void setObject(int parameterIndex, Object x) throws SQLException { setParam(parameterIndex, null, EsType.NULL); return; } - + // check also here the unsupported types so that any unsupported interfaces ({@code java.sql.Struct}, // {@code java.sql.Array} etc) will generate the correct exception message. Otherwise, the method call // {@code TypeConverter.fromJavaToJDBC(x.getClass())} will report the implementing class as not being supported. @@ -330,7 +330,7 @@ public void setNClob(int parameterIndex, Reader reader, long length) throws SQLE public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { setObject(parameterIndex, xmlObject); } - + @Override public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { setObject(parameterIndex, x, TypeUtils.asSqlType(targetSqlType), scaleOrLength); @@ -343,13 +343,12 @@ public void setObject(int parameterIndex, Object x, SQLType targetSqlType, int s private void setObject(int parameterIndex, Object x, EsType dataType, String typeString) throws SQLException { checkOpen(); - // set the null value on the type and exit if (x == null) { setParam(parameterIndex, null, dataType); return; } - + checkKnownUnsupportedTypes(x); if (x instanceof byte[]) { if (dataType != EsType.BINARY) { @@ -359,7 +358,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ setParam(parameterIndex, x, EsType.BINARY); return; } - + if (x instanceof Timestamp || x instanceof Calendar || x instanceof Date @@ -380,7 +379,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ LocalDateTime ldt = (LocalDateTime) x; Calendar cal = getDefaultCalendar(); cal.set(ldt.getYear(), ldt.getMonthValue() - 1, ldt.getDayOfMonth(), ldt.getHour(), ldt.getMinute(), ldt.getSecond()); - + dateToSet = cal.getTime(); } else if (x instanceof Time) { dateToSet = new java.util.Date(((Time) x).getTime()); @@ -398,7 +397,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ throw new SQLFeatureNotSupportedException( "Conversion from type [" + x.getClass().getName() + "] to [" + typeString + "] not supported"); } - + if (x instanceof Boolean || x instanceof Byte || x instanceof Short @@ -412,7 +411,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ dataType); return; } - + throw new SQLFeatureNotSupportedException( "Conversion from type [" + x.getClass().getName() + "] to [" + typeString + "] not supported"); } @@ -421,14 +420,14 @@ private void checkKnownUnsupportedTypes(Object x) throws SQLFeatureNotSupportedE List> unsupportedTypes = new ArrayList<>(Arrays.asList(Struct.class, Array.class, SQLXML.class, RowId.class, Ref.class, Blob.class, NClob.class, Clob.class, LocalDate.class, LocalTime.class, OffsetTime.class, OffsetDateTime.class, URL.class, BigDecimal.class)); - + for (Class 
clazz:unsupportedTypes) { if (clazz.isAssignableFrom(x.getClass())) { throw new SQLFeatureNotSupportedException("Objects of type [" + clazz.getName() + "] are not supported"); } } } - + private Calendar getDefaultCalendar() { return Calendar.getInstance(cfg.timeZone(), Locale.ROOT); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index 9c30241ccbdb1..7e21f2206b1e9 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -5,13 +5,16 @@ */ package org.elasticsearch.xpack.sql.jdbc; +import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.proto.StringUtils; +import java.io.IOException; import java.sql.Date; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Time; import java.sql.Timestamp; +import java.text.ParseException; import java.time.Duration; import java.time.LocalDate; import java.time.LocalDateTime; @@ -100,6 +103,7 @@ private static T dateTimeConvert(Long millis, Calendar c, Function readScriptSpec() throws Exception { + List list = new ArrayList<>(); + list.addAll(GeoCsvSpecTestCase.readScriptSpec()); + list.addAll(readScriptSpec("/single-node-only/command-sys-geo.csv-spec", specParser())); + return list; + } + + public GeoJdbcCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber, testCase); + } +} diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java new file mode 100644 index 0000000000000..2a9a1592c71d0 --- /dev/null +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.qa.single_node; + +import org.elasticsearch.xpack.sql.qa.geo.GeoSqlSpecTestCase; + +public class GeoJdbcSqlSpecIT extends GeoSqlSpecTestCase { + public GeoJdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) { + super(fileName, groupName, testName, lineNumber, query); + } +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoCsvSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoCsvSpecTestCase.java new file mode 100644 index 0000000000000..e40e6de9e3a9c --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoCsvSpecTestCase.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.qa.geo; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.client.Request; +import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; +import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase; +import org.elasticsearch.xpack.sql.jdbc.JdbcConfiguration; +import org.junit.Before; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.csvConnection; +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.executeCsvQuery; +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.specParser; + +/** + * Tests comparing sql queries executed against our jdbc client + * with hard coded result sets. + */ +public abstract class GeoCsvSpecTestCase extends SpecBaseIntegrationTestCase { + private final CsvTestCase testCase; + + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) + public static List readScriptSpec() throws Exception { + Parser parser = specParser(); + List tests = new ArrayList<>(); + tests.addAll(readScriptSpec("/ogc/ogc.csv-spec", parser)); + tests.addAll(readScriptSpec("/geo/geosql.csv-spec", parser)); + tests.addAll(readScriptSpec("/docs/geo.csv-spec", parser)); + return tests; + } + + public GeoCsvSpecTestCase(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber); + this.testCase = testCase; + } + + + @Before + public void setupTestGeoDataIfNeeded() throws Exception { + if (client().performRequest(new Request("HEAD", "/ogc")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadOGCDatasetIntoEs(client(), "ogc"); + } + if (client().performRequest(new Request("HEAD", "/geo")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadGeoDatasetIntoEs(client(), "geo"); + } + } + + @Override + protected final void doTest() throws Throwable { + try (Connection csv = csvConnection(testCase); + Connection es = esJdbc()) { + + // pass the testName as table for debugging purposes (in case the underlying reader is missing) + ResultSet expected = executeCsvQuery(csv, testName); + ResultSet elasticResults = executeJdbcQuery(es, testCase.query); + assertResults(expected, elasticResults); + } + } + + // make sure ES uses UTC (otherwise JDBC driver picks up the JVM timezone per spec/convention) + @Override + protected Properties connectionProperties() { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC"); + return connectionProperties; + } + +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java new file mode 100644 index 0000000000000..40e8f64be87cc --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.qa.geo; + +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.sql.qa.jdbc.SqlSpecTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import static org.elasticsearch.xpack.sql.qa.jdbc.DataLoader.createString; +import static org.elasticsearch.xpack.sql.qa.jdbc.DataLoader.readFromJarUrl; + +public class GeoDataLoader { + + public static void main(String[] args) throws Exception { + try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { + loadOGCDatasetIntoEs(client, "ogc"); + loadGeoDatasetIntoEs(client, "geo"); + Loggers.getLogger(GeoDataLoader.class).info("Geo data loaded"); + } + } + + protected static void loadOGCDatasetIntoEs(RestClient client, String index) throws Exception { + createIndex(client, index, createOGCIndexRequest()); + loadData(client, index, readResource("/ogc/ogc.json")); + makeFilteredAlias(client, "lakes", index, "\"term\" : { \"ogc_type\" : \"lakes\" }"); + makeFilteredAlias(client, "road_segments", index, "\"term\" : { \"ogc_type\" : \"road_segments\" }"); + makeFilteredAlias(client, "divided_routes", index, "\"term\" : { \"ogc_type\" : \"divided_routes\" }"); + makeFilteredAlias(client, "forests", index, "\"term\" : { \"ogc_type\" : \"forests\" }"); + makeFilteredAlias(client, "bridges", index, "\"term\" : { \"ogc_type\" : \"bridges\" }"); + makeFilteredAlias(client, "streams", index, "\"term\" : { \"ogc_type\" : \"streams\" }"); + makeFilteredAlias(client, "buildings", index, "\"term\" : { \"ogc_type\" : \"buildings\" }"); + makeFilteredAlias(client, "ponds", index, "\"term\" : { \"ogc_type\" : \"ponds\" }"); + makeFilteredAlias(client, "named_places", index, "\"term\" : { \"ogc_type\" : \"named_places\" }"); + makeFilteredAlias(client, "map_neatlines", index, "\"term\" : { \"ogc_type\" : \"map_neatlines\" }"); + } + + private static String createOGCIndexRequest() throws Exception { + XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); + createIndex.startObject("settings"); + { + createIndex.field("number_of_shards", 1); + } + createIndex.endObject(); + createIndex.startObject("mappings"); + { + createIndex.startObject("properties"); + { + // Common + createIndex.startObject("ogc_type").field("type", "keyword").endObject(); + createIndex.startObject("fid").field("type", "integer").endObject(); + createString("name", createIndex); + + // Type specific + createIndex.startObject("shore").field("type", "geo_shape").endObject(); // lakes + + createString("aliases", createIndex); // road_segments + createIndex.startObject("num_lanes").field("type", "integer").endObject(); // road_segments, divided_routes + createIndex.startObject("centerline").field("type", "geo_shape").endObject(); // road_segments, streams + + 
createIndex.startObject("centerlines").field("type", "geo_shape").endObject(); // divided_routes + + createIndex.startObject("boundary").field("type", "geo_shape").endObject(); // forests, named_places + + createIndex.startObject("position").field("type", "geo_shape").endObject(); // bridges, buildings + + createString("address", createIndex); // buildings + createIndex.startObject("footprint").field("type", "geo_shape").endObject(); // buildings + + createIndex.startObject("type").field("type", "keyword").endObject(); // ponds + createIndex.startObject("shores").field("type", "geo_shape").endObject(); // ponds + + createIndex.startObject("neatline").field("type", "geo_shape").endObject(); // map_neatlines + + } + createIndex.endObject(); + } + createIndex.endObject().endObject(); + return Strings.toString(createIndex); + } + + private static void createIndex(RestClient client, String index, String settingsMappings) throws IOException { + Request createIndexRequest = new Request("PUT", "/" + index); + createIndexRequest.setEntity(new StringEntity(settingsMappings, ContentType.APPLICATION_JSON)); + client.performRequest(createIndexRequest); + } + + static void loadGeoDatasetIntoEs(RestClient client, String index) throws Exception { + createIndex(client, index, readResource("/geo/geosql.json")); + loadData(client, index, readResource("/geo/geosql-bulk.json")); + } + + private static void loadData(RestClient client, String index, String bulk) throws IOException { + Request request = new Request("POST", "/" + index + "/_bulk"); + request.addParameter("refresh", "true"); + request.setJsonEntity(bulk); + Response response = client.performRequest(request); + + if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) { + throw new RuntimeException("Cannot load data " + response.getStatusLine()); + } + + String bulkResponseStr = EntityUtils.toString(response.getEntity()); + Map bulkResponseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, bulkResponseStr, false); + + if ((boolean) bulkResponseMap.get("errors")) { + throw new RuntimeException("Failed to load bulk data " + bulkResponseStr); + } + } + + + public static void makeFilteredAlias(RestClient client, String aliasName, String index, String filter) throws Exception { + Request request = new Request("POST", "/" + index + "/_alias/" + aliasName); + request.setJsonEntity("{\"filter\" : { " + filter + " } }"); + client.performRequest(request); + } + + private static String readResource(String location) throws IOException { + URL dataSet = SqlSpecTestCase.class.getResource(location); + if (dataSet == null) { + throw new IllegalArgumentException("Can't find [" + location + "]"); + } + StringBuilder builder = new StringBuilder(); + try (BufferedReader reader = new BufferedReader(new InputStreamReader(readFromJarUrl(dataSet), StandardCharsets.UTF_8))) { + String line = reader.readLine(); + while(line != null) { + if (line.trim().startsWith("//") == false) { + builder.append(line); + builder.append('\n'); + } + line = reader.readLine(); + } + return builder.toString(); + } + } + +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoSqlSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoSqlSpecTestCase.java new file mode 100644 index 0000000000000..405efac5cac35 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoSqlSpecTestCase.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.qa.geo; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.client.Request; +import org.elasticsearch.xpack.sql.qa.jdbc.LocalH2; +import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase; +import org.elasticsearch.xpack.sql.jdbc.JdbcConfiguration; +import org.h2gis.functions.factory.H2GISFunctions; +import org.junit.Before; +import org.junit.ClassRule; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.text.NumberFormat; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Properties; + +/** + * Tests comparing geo sql queries executed against our jdbc client + * with those executed against H2GIS's jdbc client. + */ +public abstract class GeoSqlSpecTestCase extends SpecBaseIntegrationTestCase { + private String query; + + @ClassRule + public static LocalH2 H2 = new LocalH2((c) -> { + // Load GIS extensions + H2GISFunctions.load(c); + c.createStatement().execute("RUNSCRIPT FROM 'classpath:/ogc/sqltsch.sql'"); + c.createStatement().execute("RUNSCRIPT FROM 'classpath:/geo/setup_test_geo.sql'"); + }); + + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) + public static List readScriptSpec() throws Exception { + Parser parser = new SqlSpecParser(); + List tests = new ArrayList<>(); + tests.addAll(readScriptSpec("/ogc/ogc.sql-spec", parser)); + tests.addAll(readScriptSpec("/geo/geosql.sql-spec", parser)); + return tests; + } + + @Before + public void setupTestGeoDataIfNeeded() throws Exception { + assumeTrue("Cannot support locales that don't use Hindu-Arabic numerals and non-ascii - sign due to H2", + "-42".equals(NumberFormat.getInstance(Locale.getDefault()).format(-42))); + if (client().performRequest(new Request("HEAD", "/ogc")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadOGCDatasetIntoEs(client(), "ogc"); + } + if (client().performRequest(new Request("HEAD", "/geo")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadGeoDatasetIntoEs(client(), "geo"); + } + } + + + private static class SqlSpecParser implements Parser { + @Override + public Object parse(String line) { + return line.endsWith(";") ? 
line.substring(0, line.length() - 1) : line; + } + } + + public GeoSqlSpecTestCase(String fileName, String groupName, String testName, Integer lineNumber, String query) { + super(fileName, groupName, testName, lineNumber); + this.query = query; + } + + @Override + protected final void doTest() throws Throwable { + try (Connection h2 = H2.get(); + Connection es = esJdbc()) { + + ResultSet expected, elasticResults; + expected = executeJdbcQuery(h2, query); + elasticResults = executeJdbcQuery(es, query); + + assertResults(expected, elasticResults); + } + } + + // TODO: use UTC for now until deciding on a strategy for handling date extraction + @Override + protected Properties connectionProperties() { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC"); + return connectionProperties; + } +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java index 6376bd13308d6..daa4e5b4d0c87 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java @@ -46,7 +46,7 @@ private CsvTestUtils() { */ public static ResultSet executeCsvQuery(Connection csv, String csvTableName) throws SQLException { ResultSet expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY) - .executeQuery("SELECT * FROM " + csvTableName); + .executeQuery("SELECT * FROM " + csvTableName); // trigger data loading for type inference expected.beforeFirst(); return expected; @@ -187,13 +187,13 @@ public Object parse(String line) { } else { if (line.endsWith(";")) { - // pick up the query - testCase = new CsvTestCase(); - query.append(line.substring(0, line.length() - 1).trim()); - testCase.query = query.toString(); - testCase.earlySchema = earlySchema.toString(); - earlySchema.setLength(0); - query.setLength(0); + // pick up the query + testCase = new CsvTestCase(); + query.append(line.substring(0, line.length() - 1).trim()); + testCase.query = query.toString(); + testCase.earlySchema = earlySchema.toString(); + earlySchema.setLength(0); + query.setLength(0); } // keep reading the query else { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java index 774a406da863c..ff50a33a0afe8 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java @@ -63,7 +63,7 @@ public static void loadDocsDatasetIntoEs(RestClient client) throws Exception { freeze(client, "archive"); } - private static void createString(String name, XContentBuilder builder) throws Exception { + public static void createString(String name, XContentBuilder builder) throws Exception { builder.startObject(name).field("type", "text") .startObject("fields") .startObject("keyword").field("type", "keyword").endObject() @@ -292,7 +292,7 @@ protected static void loadLibDatasetIntoEs(RestClient client, String index) thro Response response = client.performRequest(request); } - protected static void makeAlias(RestClient client, String aliasName, String... 
indices) throws Exception { + public static void makeAlias(RestClient client, String aliasName, String... indices) throws Exception { for (String index : indices) { client.performRequest(new Request("POST", "/" + index + "/_alias/" + aliasName)); } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index 8931fe0264e9d..76894fc5a53d5 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java @@ -8,18 +8,25 @@ import com.carrotsearch.hppc.IntObjectHashMap; import org.apache.logging.log4j.Logger; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.Point; +import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.jdbc.EsType; import org.elasticsearch.xpack.sql.proto.StringUtils; import org.relique.jdbc.csv.CsvResultSet; +import java.io.IOException; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; +import java.text.ParseException; import java.time.temporal.TemporalAmount; import java.util.ArrayList; +import java.util.Calendar; import java.util.List; import java.util.Locale; +import java.util.TimeZone; import static java.lang.String.format; import static java.sql.Types.BIGINT; @@ -29,6 +36,8 @@ import static java.sql.Types.REAL; import static java.sql.Types.SMALLINT; import static java.sql.Types.TINYINT; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -38,6 +47,7 @@ * Utility class for doing JUnit-style asserts over JDBC. */ public class JdbcAssert { + private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); private static final IntObjectHashMap SQL_TO_TYPE = new IntObjectHashMap<>(); @@ -139,6 +149,11 @@ public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, expectedType = Types.TIMESTAMP; } + // H2 treats GEOMETRY as OTHER + if (expectedType == Types.OTHER && nameOf(actualType).startsWith("GEO_") ) { + actualType = Types.OTHER; + } + // since csv doesn't support real, we use float instead..... if (expectedType == Types.FLOAT && expected instanceof CsvResultSet) { expectedType = Types.REAL; @@ -251,6 +266,24 @@ else if (type == Types.DOUBLE) { assertEquals(msg, (double) expectedObject, (double) actualObject, lenientFloatingNumbers ? 1d : 0.0d); } else if (type == Types.FLOAT) { assertEquals(msg, (float) expectedObject, (float) actualObject, lenientFloatingNumbers ? 
1f : 0.0f); + } else if (type == Types.OTHER) { + if (actualObject instanceof Geometry) { + // We need to convert the expected object to libs/geo Geometry for comparision + try { + expectedObject = WellKnownText.fromWKT(expectedObject.toString()); + } catch (IOException | ParseException ex) { + fail(ex.getMessage()); + } + } + if (actualObject instanceof Point) { + // geo points are loaded form doc values where they are stored as long-encoded values leading + // to lose in precision + assertThat(expectedObject, instanceOf(Point.class)); + assertEquals(((Point) expectedObject).getLat(), ((Point) actualObject).getLat(), 0.000001d); + assertEquals(((Point) expectedObject).getLon(), ((Point) actualObject).getLon(), 0.000001d); + } else { + assertEquals(msg, expectedObject, actualObject); + } } // intervals else if (type == Types.VARCHAR && actualObject instanceof TemporalAmount) { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java index e6295985cf519..2f3ce7eaddd88 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java @@ -81,4 +81,4 @@ protected void after() { public Connection get() throws SQLException { return DriverManager.getConnection(url, DEFAULTS); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec index 9f63de97c9928..073788511d0f0 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec @@ -129,11 +129,20 @@ RIGHT |SCALAR RTRIM |SCALAR SPACE |SCALAR SUBSTRING |SCALAR -UCASE |SCALAR +UCASE |SCALAR CAST |SCALAR CONVERT |SCALAR DATABASE |SCALAR USER |SCALAR +ST_ASTEXT |SCALAR +ST_ASWKT |SCALAR +ST_DISTANCE |SCALAR +ST_GEOMETRYTYPE |SCALAR +ST_GEOMFROMTEXT |SCALAR +ST_WKTTOSQL |SCALAR +ST_X |SCALAR +ST_Y |SCALAR +ST_Z |SCALAR SCORE |SCORE ; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index c2432007bff35..936c7eef88191 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -201,7 +201,7 @@ showFunctions // tag::showFunctions SHOW FUNCTIONS; - name | type + name | type -----------------+--------------- AVG |AGGREGATE COUNT |AGGREGATE @@ -325,13 +325,21 @@ RIGHT |SCALAR RTRIM |SCALAR SPACE |SCALAR SUBSTRING |SCALAR -UCASE |SCALAR +UCASE |SCALAR CAST |SCALAR CONVERT |SCALAR DATABASE |SCALAR USER |SCALAR +ST_ASTEXT |SCALAR +ST_ASWKT |SCALAR +ST_DISTANCE |SCALAR +ST_GEOMETRYTYPE |SCALAR +ST_GEOMFROMTEXT |SCALAR +ST_WKTTOSQL |SCALAR +ST_X |SCALAR +ST_Y |SCALAR +ST_Z |SCALAR SCORE |SCORE - // end::showFunctions ; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/geo.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/geo.csv-spec new file mode 100644 index 0000000000000..60fbebfc13950 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/geo.csv-spec @@ -0,0 +1,79 @@ +// +// CSV spec used by the geo docs +// + +/////////////////////////////// +// +// ST_AsWKT() +// +/////////////////////////////// + +selectAsWKT +// tag::aswkt +SELECT city, ST_AsWKT(location) location FROM "geo" WHERE city = 'Amsterdam'; + + city:s | location:s +Amsterdam |point 
(4.850311987102032 52.347556999884546) +// end::aswkt +; + +selectWKTToSQL +// tag::wkttosql +SELECT CAST(ST_WKTToSQL('POINT (10 20)') AS STRING) location; + + location:s +point (10.0 20.0) +// end::wkttosql +; + + +selectDistance +// tag::distance +SELECT ST_Distance(ST_WKTToSQL('POINT (10 20)'), ST_WKTToSQL('POINT (20 30)')) distance; + + distance:d +1499101.2889383635 +// end::distance +; + +/////////////////////////////// +// +// Geometry Properties +// +/////////////////////////////// + +selectGeometryType +// tag::geometrytype +SELECT ST_GeometryType(ST_WKTToSQL('POINT (10 20)')) type; + + type:s +POINT +// end::geometrytype +; + +selectX +// tag::x +SELECT ST_X(ST_WKTToSQL('POINT (10 20)')) x; + + x:d +10.0 +// end::x +; + +selectY +// tag::y +SELECT ST_Y(ST_WKTToSQL('POINT (10 20)')) y; + + y:d +20.0 +// end::y +; + +selectZ +// tag::z +SELECT ST_Z(ST_WKTToSQL('POINT (10 20 30)')) z; + + z:d +30.0 +// end::z +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv b/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv new file mode 100644 index 0000000000000..8275bd7c884ef --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv @@ -0,0 +1,16 @@ +city,region,region_point,location,shape +Mountain View,Americas,POINT(-105.2551 54.5260),point (-122.083843 37.386483),point (-122.083843 37.386483) +Chicago,Americas,POINT(-105.2551 54.5260),point (-87.637874 41.888783),point (-87.637874 41.888783) +New York,Americas,POINT(-105.2551 54.5260),point (-73.990027 40.745171),point (-73.990027 40.745171) +San Francisco,Americas,POINT(-105.2551 54.5260),point (-122.394228 37.789541),point (-122.394228 37.789541) +Phoenix,Americas,POINT(-105.2551 54.5260),point (-111.973505 33.376242),point (-111.973505 33.376242) +Amsterdam,Europe,POINT(15.2551 54.5260),point (4.850312 52.347557),point (4.850312 52.347557) +Berlin,Europe,POINT(15.2551 54.5260),point (13.390889 52.486701),point (13.390889 52.486701) +Munich,Europe,POINT(15.2551 54.5260),point (11.537505 48.146321),point (11.537505 48.146321) +London,Europe,POINT(15.2551 54.5260),point (-0.121672 51.510871),point (-0.121672 51.510871) +Paris,Europe,POINT(15.2551 54.5260),point (2.351773 48.845538),point (2.351773 48.845538) +Singapore,Asia,POINT(100.6197 34.0479),point (103.855535 1.295868),point (103.855535 1.295868) +Hong Kong,Asia,POINT(100.6197 34.0479),point (114.183925 22.281397),point (114.183925 22.281397) +Seoul,Asia,POINT(100.6197 34.0479),point (127.060851 37.509132),point (127.060851 37.509132) +Tokyo,Asia,POINT(100.6197 34.0479),point (139.76402225 35.669616),point (139.76402225 35.669616) +Sydney,Asia,POINT(100.6197 34.0479),point (151.208629 -33.863385),point (151.208629 -33.863385) diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json new file mode 100644 index 0000000000000..8c65742aac063 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json @@ -0,0 +1,33 @@ +{"index":{"_id": "1"}} +{"region": "Americas", "city": "Mountain View", "location": {"lat":"37.386483", "lon":"-122.083843"}, "location_no_dv": {"lat":"37.386483", "lon":"-122.083843"}, "shape": "POINT (-122.083843 37.386483 30)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "2"}} +{"region": "Americas", "city": "Chicago", "location": [-87.637874, 41.888783], "location_no_dv": [-87.637874, 41.888783], "shape": {"type" : "point", "coordinates" : [-87.637874, 41.888783, 181]}, "region_point": "POINT(-105.2551 54.5260)"} 
+{"index":{"_id": "3"}} +{"region": "Americas", "city": "New York", "location": "40.745171,-73.990027", "location_no_dv": "40.745171,-73.990027", "shape": "POINT (-73.990027 40.745171 10)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "4"}} +{"region": "Americas", "city": "San Francisco", "location": "37.789541,-122.394228", "location_no_dv": "37.789541,-122.394228", "shape": "POINT (-122.394228 37.789541 16)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "5"}} +{"region": "Americas", "city": "Phoenix", "location": "33.376242,-111.973505", "location_no_dv": "33.376242,-111.973505", "shape": "POINT (-111.973505 33.376242 331)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "6"}} +{"region": "Europe", "city": "Amsterdam", "location": "52.347557,4.850312", "location_no_dv": "52.347557,4.850312", "shape": "POINT (4.850312 52.347557 2)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "7"}} +{"region": "Europe", "city": "Berlin", "location": "52.486701,13.390889", "location_no_dv": "52.486701,13.390889", "shape": "POINT (13.390889 52.486701 34)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "8"}} +{"region": "Europe", "city": "Munich", "location": "48.146321,11.537505", "location_no_dv": "48.146321,11.537505", "shape": "POINT (11.537505 48.146321 519)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "9"}} +{"region": "Europe", "city": "London", "location": "51.510871,-0.121672", "location_no_dv": "51.510871,-0.121672", "shape": "POINT (-0.121672 51.510871 11)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "10"}} +{"region": "Europe", "city": "Paris", "location": "48.845538,2.351773", "location_no_dv": "48.845538,2.351773", "shape": "POINT (2.351773 48.845538 35)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "11"}} +{"region": "Asia", "city": "Singapore", "location": "1.295868,103.855535", "location_no_dv": "1.295868,103.855535", "shape": "POINT (103.855535 1.295868 15)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "12"}} +{"region": "Asia", "city": "Hong Kong", "location": "22.281397,114.183925", "location_no_dv": "22.281397,114.183925", "shape": "POINT (114.183925 22.281397 552)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "13"}} +{"region": "Asia", "city": "Seoul", "location": "37.509132,127.060851", "location_no_dv": "37.509132,127.060851", "shape": "POINT (127.060851 37.509132 38)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "14"}} +{"region": "Asia", "city": "Tokyo", "location": "35.669616,139.76402225", "location_no_dv": "35.669616,139.76402225", "shape": "POINT (139.76402225 35.669616 40)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "15"}} +{"region": "Asia", "city": "Sydney", "location": "-33.863385,151.208629", "location_no_dv": "-33.863385,151.208629", "shape": "POINT (151.208629 -33.863385 100)", "region_point": "POINT(100.6197 34.0479)"} + + + diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec new file mode 100644 index 0000000000000..31f3857216c0b --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec @@ -0,0 +1,288 @@ +// +// Commands on geo test data +// + +showTables +SHOW TABLES "geo"; + + name:s | type:s | kind:s +geo |BASE TABLE |INDEX +; + +// DESCRIBE + +describe +DESCRIBE "geo"; + + column:s | type:s | mapping:s +city | VARCHAR | keyword +location | GEOMETRY | geo_point 
+location_no_dv | GEOMETRY | geo_point +region | VARCHAR | keyword +region_point | VARCHAR | keyword +shape | GEOMETRY | geo_shape +; + +// SELECT ALL +// TODO: For now we just get geopoint formatted as is and we also need to convert it to STRING to work with CSV + +selectAllPointsAsStrings +SELECT city, CAST(location AS STRING) location, CAST(location_no_dv AS STRING) location_no_dv, CAST(shape AS STRING) shape, region FROM "geo" ORDER BY "city"; + + city:s | location:s | location_no_dv:s | shape:s | region:s +Amsterdam |point (4.850311987102032 52.347556999884546) |point (4.850312 52.347557) |point (4.850312 52.347557 2.0) |Europe +Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701) |point (13.390889 52.486701 34.0) |Europe +Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783) |point (-87.637874 41.888783 181.0) |Americas +Hong Kong |point (114.18392493389547 22.28139698971063) |point (114.183925 22.281397) |point (114.183925 22.281397 552.0) |Asia +London |point (-0.12167204171419144 51.51087098289281)|point (-0.121672 51.510871) |point (-0.121672 51.510871 11.0) |Europe +Mountain View |point (-122.08384302444756 37.38648299127817) |point (-122.083843 37.386483) |point (-122.083843 37.386483 30.0) |Americas +Munich |point (11.537504978477955 48.14632098656148) |point (11.537505 48.146321) |point (11.537505 48.146321 519.0) |Europe +New York |point (-73.9900270756334 40.74517097789794) |point (-73.990027 40.745171) |point (-73.990027 40.745171 10.0) |Americas +Paris |point (2.3517729341983795 48.84553796611726) |point (2.351773 48.845538) |point (2.351773 48.845538 35.0) |Europe +Phoenix |point (-111.97350500151515 33.37624196894467) |point (-111.973505 33.376242) |point (-111.973505 33.376242 331.0) |Americas +San Francisco |point (-122.39422800019383 37.789540970698) |point (-122.394228 37.789541) |point (-122.394228 37.789541 16.0) |Americas +Seoul |point (127.06085099838674 37.50913198571652) |point (127.060851 37.509132) |point (127.060851 37.509132 38.0) |Asia +Singapore |point (103.8555349688977 1.2958679627627134) |point (103.855535 1.295868) |point (103.855535 1.295868 15.0) |Asia +Sydney |point (151.20862897485495 -33.863385021686554)|point (151.208629 -33.863385) |point (151.208629 -33.863385 100.0) |Asia +Tokyo |point (139.76402222178876 35.66961596254259) |point (139.76402225 35.669616)|point (139.76402225 35.669616 40.0) |Asia +; + +// TODO: Both shape and location contain the same data for now, we should change it later to make things more interesting +selectAllPointsAsWKT +SELECT city, ST_ASWKT(location) location_wkt, ST_ASWKT(shape) shape_wkt, region FROM "geo" ORDER BY "city"; + + city:s | location_wkt:s | shape_wkt:s | region:s +Amsterdam |point (4.850311987102032 52.347556999884546) |point (4.850312 52.347557 2.0) |Europe +Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701 34.0) |Europe +Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783 181.0) |Americas +Hong Kong |point (114.18392493389547 22.28139698971063) |point (114.183925 22.281397 552.0) |Asia +London |point (-0.12167204171419144 51.51087098289281)|point (-0.121672 51.510871 11.0) |Europe +Mountain View |point (-122.08384302444756 37.38648299127817) |point (-122.083843 37.386483 30.0) |Americas +Munich |point (11.537504978477955 48.14632098656148) |point (11.537505 48.146321 519.0) |Europe +New York |point (-73.9900270756334 40.74517097789794) |point (-73.990027 40.745171 10.0) |Americas 
+Paris |point (2.3517729341983795 48.84553796611726) |point (2.351773 48.845538 35.0) |Europe +Phoenix |point (-111.97350500151515 33.37624196894467) |point (-111.973505 33.376242 331.0) |Americas +San Francisco |point (-122.39422800019383 37.789540970698) |point (-122.394228 37.789541 16.0) |Americas +Seoul |point (127.06085099838674 37.50913198571652) |point (127.060851 37.509132 38.0) |Asia +Singapore |point (103.8555349688977 1.2958679627627134) |point (103.855535 1.295868 15.0) |Asia +Sydney |point (151.20862897485495 -33.863385021686554)|point (151.208629 -33.863385 100.0) |Asia +Tokyo |point (139.76402222178876 35.66961596254259) |point (139.76402225 35.669616 40.0) |Asia +; + +selectWithAsWKTInWhere +SELECT city, ST_ASWKT(location) location_wkt, region FROM "geo" WHERE LOCATE('114', ST_ASWKT(location)) > 0 ORDER BY "city"; + + city:s | location_wkt:s | region:s +Hong Kong |point (114.18392493389547 22.28139698971063)|Asia +; + +selectAllPointsOrderByLonFromAsWKT +SELECT city, SUBSTRING(ST_ASWKT(location), 8, LOCATE(' ', ST_ASWKT(location), 8) - 8) lon FROM "geo" ORDER BY lon; + + city:s | lon:s +London |-0.12167204171419144 +Phoenix |-111.97350500151515 +Mountain View |-122.08384302444756 +San Francisco |-122.39422800019383 +New York |-73.9900270756334 +Chicago |-87.63787407428026 +Singapore |103.8555349688977 +Munich |11.537504978477955 +Hong Kong |114.18392493389547 +Seoul |127.06085099838674 +Berlin |13.390888944268227 +Tokyo |139.76402222178876 +Sydney |151.20862897485495 +Paris |2.3517729341983795 +Amsterdam |4.850311987102032 +; + +selectAllPointsGroupByHemisphereFromAsWKT +SELECT COUNT(city) count, CAST(SUBSTRING(ST_ASWKT(location), 8, 1) = '-' AS STRING) west FROM "geo" GROUP BY west ORDER BY west; + + count:l | west:s +9 |false +6 |true +; + +selectRegionUsingWktToSql +SELECT region, city, ST_ASWKT(ST_WKTTOSQL(region_point)) region_wkt FROM geo ORDER BY region, city; + + region:s | city:s | region_wkt:s +Americas |Chicago |point (-105.2551 54.526) +Americas |Mountain View |point (-105.2551 54.526) +Americas |New York |point (-105.2551 54.526) +Americas |Phoenix |point (-105.2551 54.526) +Americas |San Francisco |point (-105.2551 54.526) +Asia |Hong Kong |point (100.6197 34.0479) +Asia |Seoul |point (100.6197 34.0479) +Asia |Singapore |point (100.6197 34.0479) +Asia |Sydney |point (100.6197 34.0479) +Asia |Tokyo |point (100.6197 34.0479) +Europe |Amsterdam |point (15.2551 54.526) +Europe |Berlin |point (15.2551 54.526) +Europe |London |point (15.2551 54.526) +Europe |Munich |point (15.2551 54.526) +Europe |Paris |point (15.2551 54.526) +; + +selectCitiesWithAGroupByWktToSql +SELECT COUNT(city) city_by_region, CAST(ST_WKTTOSQL(region_point) AS STRING) region FROM geo WHERE city LIKE '%a%' GROUP BY ST_WKTTOSQL(region_point) ORDER BY ST_WKTTOSQL(region_point); + + city_by_region:l | region:s +3 |point (-105.2551 54.526) +1 |point (100.6197 34.0479) +2 |point (15.2551 54.526) +; + +selectCitiesWithEOrderByWktToSql +SELECT region, city FROM geo WHERE city LIKE '%e%' ORDER BY ST_WKTTOSQL(region_point), city; + + region:s | city:s +Americas |Mountain View +Americas |New York +Americas |Phoenix +Asia |Seoul +Asia |Singapore +Asia |Sydney +Europe |Amsterdam +Europe |Berlin +; + + +selectCitiesByDistance +SELECT region, city, ST_Distance(location, ST_WktToSQL('POINT (-71 42)')) distance FROM geo WHERE distance < 5000000 ORDER BY region, city; + + region:s | city:s | distance:d +Americas |Chicago |1373941.5140200066 +Americas |Mountain View |4335936.909375596 +Americas |New York 
|285839.6579622518 +Americas |Phoenix |3692895.0346903414 +Americas |San Francisco |4343565.010996301 +; + +selectCitiesByDistanceFloored +SELECT region, city, FLOOR(ST_Distance(location, ST_WktToSQL('POINT (-71 42)'))) distance FROM geo WHERE distance < 5000000 ORDER BY region, city; + + region:s | city:s | distance:l +Americas |Chicago |1373941 +Americas |Mountain View |4335936 +Americas |New York |285839 +Americas |Phoenix |3692895 +Americas |San Francisco |4343565 +; + +selectCitiesOrderByDistance +SELECT region, city FROM geo ORDER BY ST_Distance(location, ST_WktToSQL('POINT (-71 42)')) ; + + region:s | city:s +Americas |New York +Americas |Chicago +Americas |Phoenix +Americas |Mountain View +Americas |San Francisco +Europe |London +Europe |Paris +Europe |Amsterdam +Europe |Berlin +Europe |Munich +Asia |Tokyo +Asia |Seoul +Asia |Hong Kong +Asia |Singapore +Asia |Sydney +; + +groupCitiesByDistance +SELECT COUNT(*) count, FIRST(region) region FROM geo GROUP BY FLOOR(ST_Distance(location, ST_WktToSQL('POINT (-71 42)'))/5000000); + + count:l | region:s +5 |Americas +5 |Europe +3 |Asia +2 |Asia +; + +selectWktToSqlOfNull +SELECT ST_ASWKT(ST_WktToSql(NULL)) shape; + shape:s +null +; + +selectWktToSqlOfNull +SELECT ST_Distance(ST_WktToSql(NULL), ST_WktToSQL('POINT (-71 42)')) shape; + shape:d +null +; + +groupByGeometryType +SELECT COUNT(*) cnt, ST_GeometryType(location) gt FROM geo GROUP BY ST_GeometryType(location); + + cnt:l | gt:s +15 |POINT +; + + +groupAndOrderByGeometryType +SELECT COUNT(*) cnt, ST_GeometryType(location) gt FROM geo GROUP BY gt ORDER BY gt; + + cnt:l | gt:s +15 |POINT +; + +groupByEastWest +SELECT COUNT(*) cnt, FLOOR(ST_X(location)/90) east FROM geo GROUP BY east ORDER BY east; + + cnt:l | east:l +3 |-2 +3 |-1 +4 |0 +5 |1 +; + +groupByNorthSouth +SELECT COUNT(*) cnt, FLOOR(ST_Y(location)/45) north FROM geo GROUP BY north ORDER BY north; + + cnt:l | north:l +1 |-1 +9 |0 +5 |1 +; + +groupByNorthEastSortByEastNorth +SELECT COUNT(*) cnt, FLOOR(ST_Y(location)/45) north, FLOOR(ST_X(location)/90) east FROM geo GROUP BY north, east ORDER BY east, north; + + cnt:l | north:l | east:l +3 |0 |-2 +2 |0 |-1 +1 |1 |-1 +4 |1 |0 +1 |-1 |1 +4 |0 |1 +; + +selectFilterByXOfLocation +SELECT city, ST_X(shape) x, ST_Y(shape) y, ST_Z(shape) z, ST_X(location) lx, ST_Y(location) ly FROM geo WHERE lx > 0 ORDER BY ly; + + city:s | x:d | y:d | z:d | lx:d | ly:d +Sydney |151.208629 |-33.863385 |100.0 |151.20862897485495|-33.863385021686554 +Singapore |103.855535 |1.295868 |15.0 |103.8555349688977 |1.2958679627627134 +Hong Kong |114.183925 |22.281397 |552.0 |114.18392493389547|22.28139698971063 +Tokyo |139.76402225 |35.669616 |40.0 |139.76402222178876|35.66961596254259 +Seoul |127.060851 |37.509132 |38.0 |127.06085099838674|37.50913198571652 +Munich |11.537505 |48.146321 |519.0 |11.537504978477955|48.14632098656148 +Paris |2.351773 |48.845538 |35.0 |2.3517729341983795|48.84553796611726 +Amsterdam |4.850312 |52.347557 |2.0 |4.850311987102032 |52.347556999884546 +Berlin |13.390889 |52.486701 |34.0 |13.390888944268227|52.48670099303126 +; + +selectFilterByRegionPoint +SELECT city, region, ST_X(location) x FROM geo WHERE ST_X(ST_WKTTOSQL(region_point)) < 0 ORDER BY x; + + city:s | region:s | x:d +San Francisco |Americas |-122.39422800019383 +Mountain View |Americas |-122.08384302444756 +Phoenix |Americas |-111.97350500151515 +Chicago |Americas |-87.63787407428026 +New York |Americas |-73.9900270756334 +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json 
b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json new file mode 100644 index 0000000000000..56007a0284c43 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json @@ -0,0 +1,28 @@ +{ + "settings": { + "number_of_shards": 1 + }, + "mappings": { + "properties": { + "region": { + "type": "keyword" + }, + "city": { + "type": "keyword" + }, + "location": { + "type": "geo_point" + }, + "location_no_dv": { + "type": "geo_point", + "doc_values": "false" + }, + "shape": { + "type": "geo_shape" + }, + "region_point": { + "type": "keyword" + } + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.sql-spec new file mode 100644 index 0000000000000..e801d8477f6bf --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.sql-spec @@ -0,0 +1,24 @@ +// +// Commands on geo test data +// + +selectAllShapesAsGeometries +SELECT city, shape, region FROM "geo" ORDER BY "city"; + +selectAllShapesAsWKT +SELECT city, ST_GEOMFROMTEXT(ST_ASWKT(shape)) shape_wkt, region FROM "geo" ORDER BY "city"; + +selectAllPointsAsGeometries +SELECT city, location, region FROM "geo" ORDER BY "city"; + +selectAllPointsAsWKT +SELECT city, ST_GEOMFROMTEXT(ST_ASWKT(location)) shape_wkt, region FROM "geo" ORDER BY "city"; + +selectRegionUsingWktToSqlWithoutConvertion +SELECT region, city, shape, ST_GEOMFROMTEXT(region_point) region_wkt FROM geo ORDER BY region, city; + +selectCitiesWithGroupByWktToSql +SELECT COUNT(city) city_by_region, ST_GEOMFROMTEXT(region_point) region_geom FROM geo WHERE city LIKE '%a%' GROUP BY region_geom ORDER BY city_by_region; + +selectCitiesWithOrderByWktToSql +SELECT region, city, UCASE(ST_ASWKT(ST_GEOMFROMTEXT(region_point))) region_wkt FROM geo WHERE city LIKE '%e%' ORDER BY region_wkt, city; diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql new file mode 100644 index 0000000000000..b8b8d4e36f453 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS "geo"; +CREATE TABLE "geo" ( + "city" VARCHAR(50), + "region" VARCHAR(50), + "region_point" VARCHAR(50), + "location" POINT, + "shape" GEOMETRY +) + AS SELECT * FROM CSVREAD('classpath:/geo/geo.csv'); diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/OGC-NOTICE.txt b/x-pack/plugin/sql/qa/src/main/resources/ogc/OGC-NOTICE.txt new file mode 100644 index 0000000000000..ac061f5cc4493 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/OGC-NOTICE.txt @@ -0,0 +1,41 @@ +Software Notice + +This OGC work (including software, documents, or other related items) is being +provided by the copyright holders under the following license. By obtaining, +using and/or copying this work, you (the licensee) agree that you have read, +understood, and will comply with the following terms and conditions: + +Permission to use, copy, and modify this software and its documentation, with +or without modification, for any purpose and without fee or royalty is hereby +granted, provided that you include the following on ALL copies of the software +and documentation or portions thereof, including modifications, that you make: + +1. The full text of this NOTICE in a location viewable to users of the +redistributed or derivative work. + +2. Any pre-existing intellectual property disclaimers, notices, or terms and +conditions. 
If none exist, a short notice of the following form (hypertext is +preferred, text is permitted) should be used within the body of any +redistributed or derivative code: "Copyright © [$date-of-document] Open +Geospatial Consortium, Inc. All Rights Reserved. +http://www.opengeospatial.org/ogc/legal (Hypertext is preferred, but a textual +representation is permitted.) + +3. Notice of any changes or modifications to the OGC files, including the date +changes were made. (We recommend you provide URIs to the location from which +the code is derived.) + + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE +NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT +THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE ANY THIRD PARTY +ATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. + +COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENTATION. + +The name and trademarks of copyright holders may NOT be used in advertising or +publicity pertaining to the software without specific, written prior permission. +Title to copyright in this software and any associated documentation will at all +times remain with copyright holders. \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec new file mode 100644 index 0000000000000..f1941161697d2 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec @@ -0,0 +1,36 @@ +// +// Commands on OGC data +// + +showTables +SHOW TABLES "ogc"; + + name:s | type:s | kind:s +ogc |BASE TABLE |INDEX +; + +// DESCRIBE + +describe +DESCRIBE "ogc"; + + column:s | type:s | mapping:s +address | VARCHAR | text +address.keyword | VARCHAR | keyword +aliases | VARCHAR | text +aliases.keyword | VARCHAR | keyword +boundary | GEOMETRY | geo_shape +centerline | GEOMETRY | geo_shape +centerlines | GEOMETRY | geo_shape +fid | INTEGER | integer +footprint | GEOMETRY | geo_shape +name | VARCHAR | text +name.keyword | VARCHAR | keyword +neatline | GEOMETRY | geo_shape +num_lanes | INTEGER | integer +ogc_type | VARCHAR | keyword +position | GEOMETRY | geo_shape +shore | GEOMETRY | geo_shape +shores | GEOMETRY | geo_shape +type | VARCHAR | keyword +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.json b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.json new file mode 100644 index 0000000000000..afdf2f5d61ac6 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.json @@ -0,0 +1,58 @@ +// This dataset is derived from OpenGIS Simple Features for SQL (Types and Functions) Test Suite on Apr 1, 2018 +// +// Copyright © 2018 Open Geospatial Consortium, Inc. All Rights Reserved. 
+// http://www.opengeospatial.org/ogc/legal +// +// lakes +{"index":{"_id": "101"}} +{"ogc_type":"lakes", "fid": 101, "name": "BLUE LAKE", "shore": "POLYGON ((52 18, 66 23, 73 9, 48 6, 52 18), (59 18, 67 18, 67 13, 59 13, 59 18))"} +// +// road segments +{"index":{"_id": "102"}} +{"ogc_type":"road_segments", "fid": 102, "name": "Route 5", "num_lanes": 2, "centerline": "LINESTRING (0 18, 10 21, 16 23, 28 26, 44 31)"} +{"index":{"_id": "103"}} +{"ogc_type":"road_segments", "fid": 103, "name": "Route 5", "aliases": "Main Street", "num_lanes": 4, "centerline": "LINESTRING (44 31, 56 34, 70 38)"} +{"index":{"_id": "104"}} +{"ogc_type":"road_segments", "fid": 104, "name": "Route 5", "num_lanes": 2, "centerline": "LINESTRING (70 38, 72 48)"} +{"index":{"_id": "105"}} +{"ogc_type":"road_segments", "fid": 105, "name": "Main Street", "num_lanes": 4, "centerline": "LINESTRING (70 38, 84 42)"} +{"index":{"_id": "106"}} +{"ogc_type":"road_segments", "fid": 106, "name": "Dirt Road by Green Forest", "num_lanes": 1, "centerline": "LINESTRING (28 26, 28 0)"} +// +// divided routes +{"index":{"_id": "119"}} +{"ogc_type":"divided_routes", "fid": 119, "name": "Route 75", "num_lanes": 4, "centerlines": "MULTILINESTRING ((10 48, 10 21, 10 0), (16 0, 16 23, 16 48))"} +// +// forests +{"index":{"_id": "109"}} +{"ogc_type":"forests", "fid": 109, "name": "Green Forest", "boundary": "MULTIPOLYGON (((28 26, 28 0, 84 0, 84 42, 28 26), (52 18, 66 23, 73 9, 48 6, 52 18)), ((59 18, 67 18, 67 13, 59 13, 59 18)))"} +// +// forests +{"index":{"_id": "110"}} +{"ogc_type":"bridges", "fid": 110, "name": "Cam Bridge", "position": "POINT (44 31)"} +// +// streams +{"index":{"_id": "111"}} +{"ogc_type":"streams", "fid": 111, "name": "Cam Stream", "centerline": "LINESTRING (38 48, 44 41, 41 36, 44 31, 52 18)"} +{"index":{"_id": "112"}} +{"ogc_type":"streams", "fid": 112, "centerline": "LINESTRING (76 0, 78 4, 73 9)"} +// +// buildings +{"index":{"_id": "113"}} +{"ogc_type":"buildings", "fid": 113, "address": "123 Main Street", "position": "POINT (52 30)", "footprint": "POLYGON ((50 31, 54 31, 54 29, 50 29, 50 31))"} +{"index":{"_id": "114"}} +{"ogc_type":"buildings", "fid": 114, "address": "215 Main Street", "position": "POINT (64 33)", "footprint": "POLYGON ((66 34, 62 34, 62 32, 66 32, 66 34))"} +// +// ponds +{"index":{"_id": "120"}} +{"ogc_type":"ponds", "fid": 120, "type": "Stock Pond", "shores": "MULTIPOLYGON (((24 44, 22 42, 24 40, 24 44)), ((26 44, 26 40, 28 42, 26 44)))"} +// +// named places +{"index":{"_id": "117"}} +{"ogc_type":"named_places", "fid": 117, "name": "Ashton", "boundary": "POLYGON ((62 48, 84 48, 84 30, 56 30, 56 34, 62 48))"} +{"index":{"_id": "118"}} +{"ogc_type":"named_places", "fid": 118, "name": "Goose Island", "boundary": "POLYGON ((67 13, 67 18, 59 18, 59 13, 67 13))"} +// +// map neat lines +{"index":{"_id": "115"}} +{"ogc_type":"map_neatlines", "fid": 115, "neatline": "POLYGON ((0 0, 0 48, 84 48, 84 0, 0 0))"} diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.sql-spec new file mode 100644 index 0000000000000..3976c5a8b181e --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.sql-spec @@ -0,0 +1,85 @@ +// +// Basic GEO SELECT +// + +selectLakes +SELECT fid, name, shore FROM lakes ORDER BY fid; +selectRoadSegments +SELECT fid, name, num_lanes, aliases, centerline FROM road_segments ORDER BY fid; +selectDividedRoutes +SELECT fid, name, num_lanes, centerlines FROM divided_routes ORDER BY fid; +selectForests +SELECT 
fid, name, boundary FROM forests ORDER BY fid; +selectBridges +SELECT fid, name, position FROM bridges ORDER BY fid; +selectStreams +SELECT fid, name, centerline FROM streams ORDER BY fid; +selectBuildings +SELECT fid, address, position, footprint FROM buildings ORDER BY fid; +selectPonds +SELECT fid, type, name, shores FROM ponds ORDER BY fid; +selectNamedPlaces +SELECT fid, name, boundary FROM named_places ORDER BY fid; +selectMapNeatLines +SELECT fid, neatline FROM map_neatlines ORDER BY fid; + +// +// Type conversion functions +// + +// The string serialization is slightly different between ES and H2, so we need to tweak it a bit by uppercasing both +// and removing floating point +selectRoadSegmentsAsWkt +SELECT fid, name, num_lanes, aliases, REPLACE(UCASE(ST_AsText(centerline)), '.0', '') centerline_wkt FROM road_segments ORDER BY fid; + +selectSinglePoint +SELECT ST_GeomFromText('point (10.0 12.0)') point; + + +// +// Geometry Property Functions +// +// H2GIS doesn't follow the standard here that mandates ST_Dimension returns SMALLINT +selectLakesProps +SELECT fid, UCASE(ST_GeometryType(shore)) type FROM lakes ORDER BY fid; +selectRoadSegmentsProps +SELECT fid, UCASE(ST_GeometryType(centerline)) type FROM road_segments ORDER BY fid; +selectDividedRoutesProps +SELECT fid, UCASE(ST_GeometryType(centerlines)) type FROM divided_routes ORDER BY fid; +selectForestsProps +SELECT fid, UCASE(ST_GeometryType(boundary)) type FROM forests ORDER BY fid; +selectBridgesProps +SELECT fid, UCASE(ST_GeometryType(position)) type FROM bridges ORDER BY fid; +selectStreamsProps +SELECT fid, UCASE(ST_GeometryType(centerline)) type FROM streams ORDER BY fid; +selectBuildingsProps +SELECT fid, UCASE(ST_GeometryType(position)) type1, UCASE(ST_GeometryType(footprint)) type2 FROM buildings ORDER BY fid; +selectPondsProps +SELECT fid, UCASE(ST_GeometryType(shores)) type FROM ponds ORDER BY fid; +selectNamedPlacesProps +SELECT fid, UCASE(ST_GeometryType(boundary)) type FROM named_places ORDER BY fid; +selectMapNeatLinesProps +SELECT fid, UCASE(ST_GeometryType(neatline)) type FROM map_neatlines ORDER BY fid; + +selectLakesXY +SELECT fid, ST_X(shore) x, ST_Y(shore) y FROM lakes ORDER BY fid; +selectRoadSegmentsXY +SELECT fid, ST_X(centerline) x, ST_Y(centerline) y FROM road_segments ORDER BY fid; +selectDividedRoutesXY +SELECT fid, ST_X(centerlines) x, ST_Y(centerlines) y FROM divided_routes ORDER BY fid; +selectForestsXY +SELECT fid, ST_X(boundary) x, ST_Y(boundary) y FROM forests ORDER BY fid; +selectBridgesPositionsXY +SELECT fid, ST_X(position) x, ST_Y(position) y FROM bridges ORDER BY fid; +selectStreamsXY +SELECT fid, ST_X(centerline) x, ST_Y(centerline) y FROM streams ORDER BY fid; +selectBuildingsXY +SELECT fid, ST_X(position) x, ST_Y(position) y FROM buildings ORDER BY fid; +selectBuildingsFootprintsXY +SELECT fid, ST_X(footprint) x, ST_Y(footprint) y FROM buildings ORDER BY fid; +selectPondsXY +SELECT fid, ST_X(shores) x, ST_Y(shores) y FROM ponds ORDER BY fid; +selectNamedPlacesXY +SELECT fid, ST_X(boundary) x, ST_Y(boundary) y FROM named_places ORDER BY fid; +selectMapNeatLinesXY +SELECT fid, ST_X(neatline) x, ST_Y(neatline) y FROM map_neatlines ORDER BY fid; diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/sqltsch.sql b/x-pack/plugin/sql/qa/src/main/resources/ogc/sqltsch.sql new file mode 100644 index 0000000000000..6d1322ecd3690 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/sqltsch.sql @@ -0,0 +1,672 @@ +-- FILE: sqltsch.sql 10/01/98 +-- +-- 1 2 3 4 5 6 7 8 
+--345678901234567890123456789012345678901234567890123456789012345678901234567890 +--////////////////////////////////////////////////////////////////////////////// +-- +-- Copyright 1998, Open GIS Consortium, Inc. +-- +-- The material in this document details an Open GIS Consortium Test Suite in +-- accordance with a license that your organization has signed. Please refer +-- to http://www.opengeospatial.org/testing/ to obtain a copy of the general license +-- (it is part of the Conformance Testing Agreement). +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- OpenGIS Simple Features for SQL (Types and Functions) Test Suite Software +-- +-- This file "sqltsch.sql" is part 1 of a two part standardized test +-- suite in SQL script form. The other file that is required for this test +-- suite, "sqltque.sql", one additional script is provided ("sqltcle.sql") that +-- performs cleanup operations between test runs, and other documents that +-- describe the OGC Conformance Test Program are available via the WWW at +-- http://www.opengeospatial.org/testing/index.htm +-- +-- NOTE CONCERNING INFORMATION ON CONFORMANCE TESTING AND THIS TEST SUITE +-- ---------------------------------------------------------------------- +-- +-- Organizations wishing to submit product for conformance testing should +-- access the above WWW site to discover the proper procedure for obtaining +-- a license to use the OpenGIS(R) certification mark associated with this +-- test suite. +-- +-- +-- NOTE CONCERNING TEST SUITE ADAPTATION +-- ------------------------------------- +-- +-- OGC recognizes that many products will have to adapt this test suite to +-- make it work properly. OGC has documented the allowable adaptations within +-- this test suite where possible. Other information about adaptations may be +-- discovered in the Test Suite Guidelines document for this test suite. +-- +-- PLEASE NOTE THE OGC REQUIRES THAT ADAPTATIONS ARE FULLY DOCUMENTED USING +-- LIBERAL COMMENT BLOCKS CONFORMING TO THE FOLLOWING FORMAT: +-- +-- -- !#@ ADAPTATION BEGIN +-- explanatory text goes here +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- original sql goes here +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +-- adated sql goes here +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- BEGIN TEST SUITE CODE +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- Create the neccessary feature and geometry tables(views) and metadata tables +-- (views) to load and query the "Blue Lake" test data for OpenGIS Simple +-- Features for SQL (Types and Functions) test. +-- +-- Required feature tables (views) are: +-- Lakes +-- Road Segments +-- Divided Routes +-- Buildings +-- Forests +-- Bridges +-- Named Places +-- Streams +-- Ponds +-- Map Neatlines +-- +-- Please refer to the Test Suite Guidelines for this test suite for further +-- information concerning this test data. 
+-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- CREATE SPATIAL_REF_SYS METADATA TABLE +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- +-- *** ADAPTATION ALERT **** +-- Implementations do not need to execute this statement if they already +-- create the spatial_ref_sys table or view via another mechanism. +-- The size of the srtext VARCHAR exceeds that allowed on some systems. +-- +-- CREATE TABLE spatial_ref_sys ( +-- srid INTEGER NOT NULL PRIMARY KEY, +-- auth_name VARCHAR(256), +-- auth_srid INTEGER, +-- -- srtext VARCHAR(2048) +-- srtext VARCHAR(2000) +-- ); +-- -- +-- INSERT INTO spatial_ref_sys VALUES(101, 'POSC', 32214, +-- 'PROJCS["UTM_ZONE_14N", GEOGCS["World Geodetic System 72", +-- DATUM["WGS_72", SPHEROID["NWL_10D", 6378135, 298.26]], +-- PRIMEM["Greenwich", 0], UNIT["Meter", 1.0]], +-- PROJECTION["Transverse_Mercator"], +-- PARAMETER["False_Easting", 500000.0], +-- PARAMETER["False_Northing", 0.0], +-- PARAMETER["Central_Meridian", -99.0], +-- PARAMETER["Scale_Factor", 0.9996], +-- PARAMETER["Latitude_of_origin", 0.0], +-- UNIT["Meter", 1.0]]' +-- ); +-- +-- +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- CREATE FEATURE SCHEMA +-- +-- *** ADAPTATION ALERT *** +-- The following schema is created using CREATE TABLE statements. +-- Furthermore, it DOES NOT create the GEOMETRY_COLUMNS metadata table. +-- Implementer's should replace the CREATE TABLES below with the mechanism +-- that it uses to create feature tables and the GEOMETRY_COLUMNS table/view +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-------------------------------------------------------------------------------- +-- +-- Create feature tables +-- +-------------------------------------------------------------------------------- +-- +-- Lakes +-- +-- +-- +-- +CREATE TABLE lakes ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + shore POLYGON +); +-- +-- Road Segments +-- +-- +-- +-- +CREATE TABLE road_segments ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + aliases VARCHAR(64), + num_lanes INTEGER, + centerline LINESTRING +); +-- +-- Divided Routes +-- +-- +-- +-- +CREATE TABLE divided_routes ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + num_lanes INTEGER, + centerlines MULTILINESTRING +); +-- +-- Forests +-- +-- +-- +-- +CREATE TABLE forests ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + boundary MULTIPOLYGON +); +-- +-- Bridges +-- +-- +-- +-- +CREATE TABLE bridges ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + position POINT +); +-- +-- Streams +-- +-- +-- +-- +CREATE TABLE streams ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + centerline LINESTRING +); +-- +-- Buildings +-- +--*** ADAPTATION ALERT *** +-- A view could be used to provide the below semantics without multiple geometry +-- columns in a table. In other words, create two tables. One table would +-- contain the POINT position and the other would create the POLYGON footprint. +-- Then create a view with the semantics of the buildings table below. 
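+-- For illustration only (not part of the original OGC script): a minimal sketch
+-- of the two-table-plus-view alternative described above, using hypothetical
+-- table names, kept commented out on purpose:
+-- CREATE TABLE building_positions (
+--     fid      INTEGER NOT NULL PRIMARY KEY,
+--     address  VARCHAR(64),
+--     position POINT
+-- );
+-- CREATE TABLE building_footprints (
+--     fid       INTEGER NOT NULL PRIMARY KEY,
+--     footprint POLYGON
+-- );
+-- CREATE VIEW buildings AS
+--     SELECT p.fid, p.address, p.position, f.footprint
+--     FROM building_positions p
+--     JOIN building_footprints f ON p.fid = f.fid;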
+-- +-- +-- +CREATE TABLE buildings ( + fid INTEGER NOT NULL PRIMARY KEY, + address VARCHAR(64), + position POINT, + footprint POLYGON +); +-- +-- Ponds +-- +-- +-- +-- +-- -- !#@ ADAPTATION BEGIN +-- Fixes typo in the MULTIPOYLGON type +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- CREATE TABLE ponds ( +-- fid INTEGER NOT NULL PRIMARY KEY, +-- name VARCHAR(64), +-- type VARCHAR(64), +-- shores MULTIPOYLGON +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +CREATE TABLE ponds ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + type VARCHAR(64), + shores MULTIPOLYGON +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +-- Named Places +-- +-- +-- +-- +CREATE TABLE named_places ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + boundary POLYGON +); +-- +-- Map Neatline +-- +-- +-- +-- +CREATE TABLE map_neatlines ( + fid INTEGER NOT NULL PRIMARY KEY, + neatline POLYGON +); +-- +-- +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- POPULATE GEOMETRY AND FEATURE TABLES +-- +-- *** ADAPTATION ALERT *** +-- This script DOES NOT make any inserts into a GEOMTERY_COLUMNS table/view. +-- Implementers should insert whatever makes this happen in their implementation +-- below. Furthermore, the inserts below may be replaced by whatever mechanism +-- may be provided by implementers to insert rows in feature tables such that +-- metadata (and other mechanisms) are updated properly. +-- +--////////////////////////////////////////////////////////////////////////////// +-- +--============================================================================== +-- Lakes +-- +-- We have one lake, Blue Lake. It is a polygon with a hole. Its geometry is +-- described in WKT format as: +-- 'POLYGON( (52 18, 66 23, 73 9, 48 6, 52 18), +-- (59 18, 67 18, 67 13, 59 13, 59 18) )' +--============================================================================== +-- +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO lakes VALUES (101, 'BLUE LAKE', +-- PolygonFromText('POLYGON((52 18,66 23,73 9,48 6,52 18),(59 18,67 18,67 13,59 13,59 18))', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO lakes VALUES (101, 'BLUE LAKE', + ST_PolyFromText('POLYGON((52 18,66 23,73 9,48 6,52 18),(59 18,67 18,67 13,59 13,59 18))', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Road segments +-- +-- We have five road segments. Their geometries are all linestrings. 
+-- The geometries are described in WKT format as: +-- name 'Route 5', fid 102 +-- 'LINESTRING( 0 18, 10 21, 16 23, 28 26, 44 31 )' +-- name 'Route 5', fid 103 +-- 'LINESTRING( 44 31, 56 34, 70 38 )' +-- name 'Route 5', fid 104 +-- 'LINESTRING( 70 38, 72 48 )' +-- name 'Main Street', fid 105 +-- 'LINESTRING( 70 38, 84 42 )' +-- name 'Dirt Road by Green Forest', fid 106 +-- 'LINESTRING( 28 26, 28 0 )' +-- +--================== +-- +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO road_segments VALUES(102, 'Route 5', NULL, 2, +-- LineStringFromText('LINESTRING( 0 18, 10 21, 16 23, 28 26, 44 31 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(103, 'Route 5', 'Main Street', 4, +-- LineStringFromText('LINESTRING( 44 31, 56 34, 70 38 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(104, 'Route 5', NULL, 2, +-- LineStringFromText('LINESTRING( 70 38, 72 48 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(105, 'Main Street', NULL, 4, +-- LineStringFromText('LINESTRING( 70 38, 84 42 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(106, 'Dirt Road by Green Forest', NULL, 1, +-- LineStringFromText('LINESTRING( 28 26, 28 0 )',101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO road_segments VALUES(102, 'Route 5', NULL, 2, + ST_LineFromText('LINESTRING( 0 18, 10 21, 16 23, 28 26, 44 31 )' ,101) +); +INSERT INTO road_segments VALUES(103, 'Route 5', 'Main Street', 4, + ST_LineFromText('LINESTRING( 44 31, 56 34, 70 38 )' ,101) +); +INSERT INTO road_segments VALUES(104, 'Route 5', NULL, 2, + ST_LineFromText('LINESTRING( 70 38, 72 48 )' ,101) +); +INSERT INTO road_segments VALUES(105, 'Main Street', NULL, 4, + ST_LineFromText('LINESTRING( 70 38, 84 42 )' ,101) +); +INSERT INTO road_segments VALUES(106, 'Dirt Road by Green Forest', NULL, 1, + ST_LineFromText('LINESTRING( 28 26, 28 0 )',101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +--================== +-- DividedRoutes +-- +-- We have one divided route. Its geometry is a multilinestring. +-- The geometry is described in WKT format as: +-- 'MULTILINESTRING( (10 48, 10 21, 10 0), (16 0, 10 23, 16 48) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO divided_routes VALUES(119, 'Route 75', 4, +-- MultiLineStringFromText('MULTILINESTRING((10 48,10 21,10 0),(16 0,16 23,16 48))', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO divided_routes VALUES(119, 'Route 75', 4, + ST_MLineFromText('MULTILINESTRING((10 48,10 21,10 0),(16 0,16 23,16 48))', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Forests +-- +-- We have one forest. Its geometry is a multipolygon. 
+-- The geometry is described in WKT format as: +-- 'MULTIPOLYGON( ( (28 26, 28 0, 84 0, 84 42, 28 26), +-- (52 18, 66 23, 73 9, 48 6, 52 18) ), +-- ( (59 18, 67 18, 67 13, 59 13, 59 18) ) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO forests VALUES(109, 'Green Forest', +-- MultiPolygonFromText('MULTIPOLYGON(((28 26,28 0,84 0,84 42,28 26),(52 18,66 23,73 9,48 6,52 18)),((59 18,67 18,67 13,59 13,59 18)))', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO forests VALUES(109, 'Green Forest', + ST_MPolyFromText('MULTIPOLYGON(((28 26,28 0,84 0,84 42,28 26),(52 18,66 23,73 9,48 6,52 18)),((59 18,67 18,67 13,59 13,59 18)))', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +--================== +-- Bridges +-- +-- We have one bridge. Its geometry is a point. +-- The geometry is described in WKT format as: +-- 'POINT( 44 31 )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO bridges VALUES(110, 'Cam Bridge', +-- PointFromText('POINT( 44 31 )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO bridges VALUES(110, 'Cam Bridge', + ST_PointFromText('POINT( 44 31 )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Streams +-- +-- We have two streams. Their geometries are linestrings. +-- The geometries are described in WKT format as: +-- 'LINESTRING( 38 48, 44 41, 41 36, 44 31, 52 18 )' +-- 'LINESTRING( 76 0, 78 4, 73 9 )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO streams VALUES(111, 'Cam Stream', +-- LineStringFromText('LINESTRING( 38 48, 44 41, 41 36, 44 31, 52 18 )', 101) +-- ); +-- INSERT INTO streams VALUES(112, NULL, +-- LineStringFromText('LINESTRING( 76 0, 78 4, 73 9 )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO streams VALUES(111, 'Cam Stream', + ST_LineFromText('LINESTRING( 38 48, 44 41, 41 36, 44 31, 52 18 )', 101) +); +INSERT INTO streams VALUES(112, NULL, + ST_LineFromText('LINESTRING( 76 0, 78 4, 73 9 )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Buildings +-- +-- We have two buildings. Their geometries are points and polygons. 
+-- The geometries are described in WKT format as: +-- address '123 Main Street' fid 113 +-- 'POINT( 52 30 )' and +-- 'POLYGON( ( 50 31, 54 31, 54 29, 50 29, 50 31) )' +-- address '215 Main Street' fid 114 +-- 'POINT( 64 33 )' and +-- 'POLYGON( ( 66 34, 62 34, 62 32, 66 32, 66 34) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO buildings VALUES(113, '123 Main Street', +-- PointFromText('POINT( 52 30 )', 101), +-- PolygonFromText('POLYGON( ( 50 31, 54 31, 54 29, 50 29, 50 31) )', 101) +-- ); +-- INSERT INTO buildings VALUES(114, '215 Main Street', +-- PointFromText('POINT( 64 33 )', 101), +-- PolygonFromText('POLYGON( ( 66 34, 62 34, 62 32, 66 32, 66 34) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO buildings VALUES(113, '123 Main Street', + ST_PointFromText('POINT( 52 30 )', 101), + ST_PolyFromText('POLYGON( ( 50 31, 54 31, 54 29, 50 29, 50 31) )', 101) +); +INSERT INTO buildings VALUES(114, '215 Main Street', + ST_PointFromText('POINT( 64 33 )', 101), + ST_PolyFromText('POLYGON( ( 66 34, 62 34, 62 32, 66 32, 66 34) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Ponds +-- +-- We have one pond. Its geometry is a multipolygon. +-- The geometry is described in WKT format as: +-- 'MULTIPOLYGON( ( ( 24 44, 22 42, 24 40, 24 44) ), ( ( 26 44, 26 40, 28 42, 26 44) ) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO ponds VALUES(120, NULL, 'Stock Pond', +-- MultiPolygonFromText('MULTIPOLYGON( ( ( 24 44, 22 42, 24 40, 24 44) ), ( ( 26 44, 26 40, 28 42, 26 44) ) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO ponds VALUES(120, NULL, 'Stock Pond', + ST_MPolyFromText('MULTIPOLYGON( ( ( 24 44, 22 42, 24 40, 24 44) ), ( ( 26 44, 26 40, 28 42, 26 44) ) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +--================== +-- Named Places +-- +-- We have two named places. Their geometries are polygons. 
+-- The geometries are described in WKT format as: +-- name 'Ashton' fid 117 +-- 'POLYGON( ( 62 48, 84 48, 84 30, 56 30, 56 34, 62 48) )' +-- address 'Goose Island' fid 118 +-- 'POLYGON( ( 67 13, 67 18, 59 18, 59 13, 67 13) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO named_places VALUES(117, 'Ashton', +-- PolygonFromText('POLYGON( ( 62 48, 84 48, 84 30, 56 30, 56 34, 62 48) )', 101) +-- ); +-- INSERT INTO named_places VALUES(118, 'Goose Island', +-- PolygonFromText('POLYGON( ( 67 13, 67 18, 59 18, 59 13, 67 13) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO named_places VALUES(117, 'Ashton', + ST_PolyFromText('POLYGON( ( 62 48, 84 48, 84 30, 56 30, 56 34, 62 48) )', 101) +); +INSERT INTO named_places VALUES(118, 'Goose Island', + ST_PolyFromText('POLYGON( ( 67 13, 67 18, 59 18, 59 13, 67 13) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Map Neatlines +-- +-- We have one map neatline. Its geometry is a polygon. +-- The geometry is described in WKT format as: +-- 'POLYGON( ( 0 0, 0 48, 84 48, 84 0, 0 0 ) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO map_neatlines VALUES(115, +-- PolygonFromText('POLYGON( ( 0 0, 0 48, 84 48, 84 0, 0 0 ) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO map_neatlines VALUES(115, + ST_PolyFromText('POLYGON( ( 0 0, 0 48, 84 48, 84 0, 0 0 ) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +-- +-- +-- end sqltsch.sql \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec new file mode 100644 index 0000000000000..c9380fae2809e --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec @@ -0,0 +1,15 @@ +// +// Geo-specific Sys Commands +// + +geoSysColumns +SYS COLUMNS TABLE LIKE 'geo'; + + TABLE_CAT:s | TABLE_SCHEM:s| TABLE_NAME:s | COLUMN_NAME:s | DATA_TYPE:i | TYPE_NAME:s | COLUMN_SIZE:i|BUFFER_LENGTH:i|DECIMAL_DIGITS:i|NUM_PREC_RADIX:i| NULLABLE:i| REMARKS:s | COLUMN_DEF:s |SQL_DATA_TYPE:i|SQL_DATETIME_SUB:i|CHAR_OCTET_LENGTH:i|ORDINAL_POSITION:i|IS_NULLABLE:s|SCOPE_CATALOG:s|SCOPE_SCHEMA:s|SCOPE_TABLE:s|SOURCE_DATA_TYPE:sh|IS_AUTOINCREMENT:s|IS_GENERATEDCOLUMN:s +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |city |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |1 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |location |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |2 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |location_no_dv |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |3 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |region |12 |KEYWORD |32766 |2147483647 |null |null 
|1 |null |null |12 |0 |2147483647 |4 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |region_point |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |5 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |shape |114 |GEO_SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |6 |YES |null |null |null |null |NO |NO +; \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index db84a444f5794..d5a4cb436e6a5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -63,6 +63,7 @@ import static org.elasticsearch.xpack.sql.stats.FeatureMetric.LOCAL; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.ORDERBY; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.WHERE; +import static org.elasticsearch.xpack.sql.type.DataType.GEO_SHAPE; /** * The verifier has the role of checking the analyzed tree for failures and build a list of failures following this check. @@ -131,7 +132,6 @@ Collection verify(LogicalPlan plan) { // start bottom-up plan.forEachUp(p -> { - if (p.analyzed()) { return; } @@ -236,6 +236,7 @@ Collection verify(LogicalPlan plan) { checkForScoreInsideFunctions(p, localFailures); checkNestedUsedInGroupByOrHaving(p, localFailures); + checkForGeoFunctionsOnDocValues(p, localFailures); // everything checks out // mark the plan as analyzed @@ -719,4 +720,33 @@ private static void checkNestedUsedInGroupByOrHaving(LogicalPlan p, Set fail(nested.get(0), "HAVING isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); } } + + /** + * Makes sure that geo shapes do not appear in filter, aggregation and sorting contexts + */ + private static void checkForGeoFunctionsOnDocValues(LogicalPlan p, Set localFailures) { + + p.forEachDown(f -> { + f.condition().forEachUp(fa -> { + if (fa.field().getDataType() == GEO_SHAPE) { + localFailures.add(fail(fa, "geo shapes cannot be used for filtering")); + } + }, FieldAttribute.class); + }, Filter.class); + + // geo shape fields shouldn't be used in aggregates or having (yet) + p.forEachDown(a -> a.groupings().forEach(agg -> agg.forEachUp(fa -> { + if (fa.field().getDataType() == GEO_SHAPE) { + localFailures.add(fail(fa, "geo shapes cannot be used in grouping")); + } + }, FieldAttribute.class)), Aggregate.class); + + + // geo shape fields shouldn't be used in order by clauses + p.forEachDown(o -> o.order().forEach(agg -> agg.forEachUp(fa -> { + if (fa.field().getDataType() == GEO_SHAPE) { + localFailures.add(fail(fa, "geo shapes cannot be used for sorting")); + } + }, FieldAttribute.class)), OrderBy.class); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index 652197473abf4..13294fbca221b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -5,13 +5,17 @@ */ package 
org.elasticsearch.xpack.sql.execution.search.extractor; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.DateUtils; @@ -128,13 +132,31 @@ private Object unwrapMultiValue(Object values) { if (list.isEmpty()) { return null; } else { - if (arrayLeniency || list.size() == 1) { - return unwrapMultiValue(list.get(0)); - } else { - throw new SqlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); + // let's make sure first that we are not dealing with an geo_point represented as an array + if (isGeoPointArray(list) == false) { + if (list.size() == 1 || arrayLeniency) { + return unwrapMultiValue(list.get(0)); + } else { + throw new SqlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); + } } } } + if (dataType == DataType.GEO_POINT) { + try { + GeoPoint geoPoint = GeoUtils.parseGeoPoint(values, true); + return new GeoShape(geoPoint.lon(), geoPoint.lat()); + } catch (ElasticsearchParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse geo_point value [{}] (returned by [{}])", values, fieldName); + } + } + if (dataType == DataType.GEO_SHAPE) { + try { + return new GeoShape(values); + } catch (IOException ex) { + throw new SqlIllegalArgumentException("Cannot read geo_shape value [{}] (returned by [{}])", values, fieldName); + } + } if (values instanceof Map) { throw new SqlIllegalArgumentException("Objects (returned by [{}]) are not supported", fieldName); } @@ -149,6 +171,17 @@ private Object unwrapMultiValue(Object values) { throw new SqlIllegalArgumentException("Type {} (returned by [{}]) is not supported", values.getClass().getSimpleName(), fieldName); } + private boolean isGeoPointArray(List list) { + if (dataType != DataType.GEO_POINT) { + return false; + } + // we expect the point in [lon lat] or [lon lat alt] formats + if (list.size() > 3 || list.size() < 1) { + return false; + } + return list.get(0) instanceof Number; + } + @SuppressWarnings({ "unchecked", "rawtypes" }) Object extractFromSource(Map map) { Object value = null; @@ -173,7 +206,9 @@ Object extractFromSource(Map map) { if (node instanceof List) { List listOfValues = (List) node; - if (listOfValues.size() == 1 || arrayLeniency) { + // we can only do this optimization until the last element of our pass since geo points are using arrays + // and we don't want to blindly ignore the second element of array if arrayLeniency is enabled + if ((i < path.length - 1) && (listOfValues.size() == 1 || arrayLeniency)) { // this is a List with a size of 1 e.g.: {"a" : [{"b" : "value"}]} meaning the JSON is a list with one element // or a list of values with one element e.g.: {"a": {"b" : ["value"]}} // in case of being lenient about arrays, just extract the first value in the array diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java index f6e1e3ad8be69..d382dad83a19d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java @@ -57,6 +57,11 @@ public static TypeResolution isNumericOrDateOrTime(Expression e, String operatio "date", "time", "datetime", "numeric"); } + + public static TypeResolution isGeo(Expression e, String operationName, ParamOrdinal paramOrd) { + return isType(e, DataType::isGeo, operationName, paramOrd, "geo_point", "geo_shape"); + } + public static TypeResolution isExact(Expression e, String message) { if (e instanceof FieldAttribute) { EsField.Exact exact = ((FieldAttribute) e).getExactInfo(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 0e9f07ef2132c..3a9ae06203476 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -46,6 +46,13 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.SecondOfMinute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.WeekOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StAswkt; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StGeometryType; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosql; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StX; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StY; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StZ; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ATan; @@ -249,11 +256,23 @@ private void defineDefaultFunctions() { def(Space.class, Space::new, "SPACE"), def(Substring.class, Substring::new, "SUBSTRING"), def(UCase.class, UCase::new, "UCASE")); + // DataType conversion addToMap(def(Cast.class, Cast::new, "CAST", "CONVERT")); // Scalar "meta" functions addToMap(def(Database.class, Database::new, "DATABASE"), def(User.class, User::new, "USER")); + + // Geo Functions + addToMap(def(StAswkt.class, StAswkt::new, "ST_ASWKT", "ST_ASTEXT"), + def(StDistance.class, StDistance::new, "ST_DISTANCE"), + def(StWkttosql.class, StWkttosql::new, "ST_WKTTOSQL", "ST_GEOMFROMTEXT"), + def(StGeometryType.class, StGeometryType::new, "ST_GEOMETRYTYPE"), + def(StX.class, StX::new, "ST_X"), + def(StY.class, StY::new, "ST_Y"), + def(StZ.class, StZ::new, "ST_Z") + ); + // Special addToMap(def(Score.class, Score::new, "SCORE")); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index d14aeea507f47..0b9bbd1094a44 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -11,6 +11,9 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor; @@ -98,6 +101,10 @@ public static List getNamedWriteables() { entries.add(new Entry(Processor.class, LocateFunctionProcessor.NAME, LocateFunctionProcessor::new)); entries.add(new Entry(Processor.class, ReplaceFunctionProcessor.NAME, ReplaceFunctionProcessor::new)); entries.add(new Entry(Processor.class, SubstringFunctionProcessor.NAME, SubstringFunctionProcessor::new)); + // geo + entries.add(new Entry(Processor.class, GeoProcessor.NAME, GeoProcessor::new)); + entries.add(new Entry(Processor.class, StWkttosqlProcessor.NAME, StWkttosqlProcessor::new)); + entries.add(new Entry(Processor.class, StDistanceProcessor.NAME, StDistanceProcessor::new)); return entries; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessor.java new file mode 100644 index 0000000000000..519e4c0c74092 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessor.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; +import java.util.function.Function; + +public class GeoProcessor implements Processor { + + private interface GeoShapeFunction { + default R apply(Object o) { + if (o instanceof GeoShape) { + return doApply((GeoShape) o); + } else { + throw new SqlIllegalArgumentException("A geo_point or geo_shape is required; received [{}]", o); + } + } + + R doApply(GeoShape s); + } + + public enum GeoOperation { + ASWKT(GeoShape::toString), + GEOMETRY_TYPE(GeoShape::getGeometryType), + X(GeoShape::getX), + Y(GeoShape::getY), + Z(GeoShape::getZ); + + private final Function apply; + + GeoOperation(GeoShapeFunction apply) { + this.apply = l -> l == null ? 
null : apply.apply(l); + } + + public final Object apply(Object l) { + return apply.apply(l); + } + } + + public static final String NAME = "geo"; + + private final GeoOperation processor; + + public GeoProcessor(GeoOperation processor) { + this.processor = processor; + } + + public GeoProcessor(StreamInput in) throws IOException { + processor = in.readEnum(GeoOperation.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(processor); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Object process(Object input) { + return processor.apply(input); + } + + GeoOperation processor() { + return processor; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + GeoProcessor other = (GeoProcessor) obj; + return processor == other.processor; + } + + @Override + public int hashCode() { + return processor.hashCode(); + } + + @Override + public String toString() { + return processor.toString(); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java new file mode 100644 index 0000000000000..74b5c9646b853 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.GeometryParser; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.geo.geometry.Circle; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.GeometryCollection; +import org.elasticsearch.geo.geometry.GeometryVisitor; +import org.elasticsearch.geo.geometry.Line; +import org.elasticsearch.geo.geometry.LinearRing; +import org.elasticsearch.geo.geometry.MultiLine; +import org.elasticsearch.geo.geometry.MultiPoint; +import org.elasticsearch.geo.geometry.MultiPolygon; +import org.elasticsearch.geo.geometry.Point; +import org.elasticsearch.geo.geometry.Polygon; +import org.elasticsearch.geo.geometry.Rectangle; +import org.elasticsearch.geo.utils.WellKnownText; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + +import java.io.IOException; +import java.io.InputStream; +import java.text.ParseException; +import java.util.Objects; + +/** + * Wrapper class to represent a GeoShape in SQL + * + * It is required to override the XContent serialization. 
The ShapeBuilder serializes using GeoJSON by default, + * but in SQL we need the serialization to be WKT-based. + */ +public class GeoShape implements ToXContentFragment, NamedWriteable { + + public static final String NAME = "geo"; + + private final Geometry shape; + + public GeoShape(double lon, double lat) { + shape = new Point(lat, lon); + } + + public GeoShape(Object value) throws IOException { + try { + shape = parse(value); + } catch (ParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + } + } + + public GeoShape(StreamInput in) throws IOException { + String value = in.readString(); + try { + shape = parse(value); + } catch (ParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(WellKnownText.toWKT(shape)); + } + + @Override + public String toString() { + return WellKnownText.toWKT(shape); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.value(WellKnownText.toWKT(shape)); + } + + public Geometry toGeometry() { + return shape; + } + + public Point firstPoint() { + return shape.visit(new GeometryVisitor() { + @Override + public Point visit(Circle circle) { + return new Point(circle.getLat(), circle.getLon(), circle.hasAlt() ? circle.getAlt() : Double.NaN); + } + + @Override + public Point visit(GeometryCollection collection) { + if (collection.size() > 0) { + return collection.get(0).visit(this); + } + return null; + } + + @Override + public Point visit(Line line) { + if (line.length() > 0) { + return new Point(line.getLat(0), line.getLon(0), line.hasAlt() ? line.getAlt(0) : Double.NaN); + } + return null; + } + + @Override + public Point visit(LinearRing ring) { + return visit((Line) ring); + } + + @Override + public Point visit(MultiLine multiLine) { + return visit((GeometryCollection) multiLine); + } + + @Override + public Point visit(MultiPoint multiPoint) { + return visit((GeometryCollection) multiPoint); + } + + @Override + public Point visit(MultiPolygon multiPolygon) { + return visit((GeometryCollection) multiPolygon); + } + + @Override + public Point visit(Point point) { + return point; + } + + @Override + public Point visit(Polygon polygon) { + return visit(polygon.getPolygon()); + } + + @Override + public Point visit(Rectangle rectangle) { + return new Point(rectangle.getMinLat(), rectangle.getMinLon(), rectangle.getMinAlt()); + } + }); + } + + public Double getX() { + Point firstPoint = firstPoint(); + return firstPoint != null ? firstPoint.getLon() : null; + } + + public Double getY() { + Point firstPoint = firstPoint(); + return firstPoint != null ? firstPoint.getLat() : null; + } + + public Double getZ() { + Point firstPoint = firstPoint(); + return firstPoint != null && firstPoint.hasAlt() ? 
firstPoint.getAlt() : null; + } + + public String getGeometryType() { + return toGeometry().type().name(); + } + + public static double distance(GeoShape shape1, GeoShape shape2) { + if (shape1.shape instanceof Point == false) { + throw new SqlIllegalArgumentException("distance calculation is only supported for points; received [{}]", shape1); + } + if (shape2.shape instanceof Point == false) { + throw new SqlIllegalArgumentException("distance calculation is only supported for points; received [{}]", shape2); + } + double srcLat = ((Point) shape1.shape).getLat(); + double srcLon = ((Point) shape1.shape).getLon(); + double dstLat = ((Point) shape2.shape).getLat(); + double dstLon = ((Point) shape2.shape).getLon(); + return GeoUtils.arcDistance(srcLat, srcLon, dstLat, dstLon); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GeoShape geoShape = (GeoShape) o; + return shape.equals(geoShape.shape); + } + + @Override + public int hashCode() { + return Objects.hash(shape); + } + + @Override + public String getWriteableName() { + return NAME; + } + + private static Geometry parse(Object value) throws IOException, ParseException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.startObject(); + content.field("value", value); + content.endObject(); + + try (InputStream stream = BytesReference.bytes(content).streamInput(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + return GeometryParser.parse(parser, true, true, true); + } + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StAswkt.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StAswkt.java new file mode 100644 index 0000000000000..5c4b6edbe87eb --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StAswkt.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_AsWKT function that takes a geometry and returns its Well Known Text representation + */ +public class StAswkt extends UnaryGeoFunction { + + public StAswkt(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StAswkt::new, field()); + } + + @Override + protected StAswkt replaceChild(Expression newChild) { + return new StAswkt(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.ASWKT; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistance.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistance.java new file mode 100644 index 0000000000000..fd14e90dd9d93 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistance.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isGeo; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +/** + * Calculates the distance between two points + */ +public class StDistance extends BinaryOperator { + + private static final StDistanceFunction FUNCTION = new StDistanceFunction(); + + public StDistance(Source source, Expression source1, Expression source2) { + super(source, source1, source2, FUNCTION); + } + + @Override + protected StDistance replaceChildren(Expression newLeft, Expression newRight) { + return new StDistance(source(), newLeft, newRight); + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StDistance::new, left(), right()); + } + + @Override + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("{sql}.geoDocValue(doc,{})"), + paramsBuilder().variable(field.exactAttribute().name()).build(), + dataType()); + } + + @Override + protected TypeResolution resolveInputType(Expression e, Expressions.ParamOrdinal paramOrdinal) { + return isGeo(e, sourceText(), paramOrdinal); + } + + @Override + 
public StDistance swapLeftAndRight() { + return new StDistance(source(), right(), left()); + } + + @Override + protected Pipe makePipe() { + return new StDistancePipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right())); + } + + @Override + protected String scriptMethodName() { + return "stDistance"; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceFunction.java new file mode 100644 index 0000000000000..d1c15c1e2a1b2 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceFunction.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.predicate.PredicateBiFunction; + +class StDistanceFunction implements PredicateBiFunction { + + @Override + public String name() { + return "ST_DISTANCE"; + } + + @Override + public String symbol() { + return "ST_DISTANCE"; + } + + @Override + public Double doApply(Object s1, Object s2) { + return StDistanceProcessor.process(s1, s2); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistancePipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistancePipe.java new file mode 100644 index 0000000000000..c944266482651 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistancePipe.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.util.Objects; + +public class StDistancePipe extends BinaryPipe { + + public StDistancePipe(Source source, Expression expression, Pipe left, Pipe right) { + super(source, expression, left, right); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StDistancePipe::new, expression(), left(), right()); + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new StDistancePipe(source(), expression(), left, right); + } + + @Override + public StDistanceProcessor asProcessor() { + return new StDistanceProcessor(left().asProcessor(), right().asProcessor()); + } + + @Override + public int hashCode() { + return Objects.hash(left(), right()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + StDistancePipe other = (StDistancePipe) obj; + return Objects.equals(left(), other.left()) + && Objects.equals(right(), other.right()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessor.java new file mode 100644 index 0000000000000..d6c9026b982d9 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessor.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; +import java.util.Objects; + +public class StDistanceProcessor extends BinaryProcessor { + + public static final String NAME = "geo_distance"; + + public StDistanceProcessor(Processor source1, Processor source2) { + super(source1, source2); + } + + public StDistanceProcessor(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWrite(StreamOutput out) throws IOException { + + } + + @Override + public Object process(Object input) { + Object l = left().process(input); + checkParameter(l); + Object r = right().process(input); + checkParameter(r); + return doProcess(l, r); + } + + @Override + protected Object doProcess(Object left, Object right) { + return process(left, right); + } + + public static Double process(Object source1, Object source2) { + if (source1 == null || source2 == null) { + return null; + } + + if (source1 instanceof GeoShape == false) { + throw new SqlIllegalArgumentException("A geo_point or geo_shape with type point is required; received [{}]", source1); + } + if (source2 instanceof GeoShape == false) { + throw new SqlIllegalArgumentException("A geo_point or geo_shape with type point is required; received [{}]", source2); + } + return GeoShape.distance((GeoShape) source1, (GeoShape) source2); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + StDistanceProcessor other = (StDistanceProcessor) obj; + return Objects.equals(left(), other.left()) + && Objects.equals(right(), other.right()); + } + + @Override + public int hashCode() { + return Objects.hash(left(), right()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StGeometryType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StGeometryType.java new file mode 100644 index 0000000000000..15215bd9201de --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StGeometryType.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_GEOMETRY_TYPE function that takes a geometry and returns its type + */ +public class StGeometryType extends UnaryGeoFunction { + + public StGeometryType(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StGeometryType::new, field()); + } + + @Override + protected StGeometryType replaceChild(Expression newChild) { + return new StGeometryType(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.GEOMETRY_TYPE; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosql.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosql.java new file mode 100644 index 0000000000000..3ebae55dec4f0 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosql.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString; + +/** + * Constructs geometric objects from their WKT representations + */ +public class StWkttosql extends UnaryScalarFunction { + + public StWkttosql(Source source, Expression field) { + super(source, field); + } + + @Override + protected StWkttosql replaceChild(Expression newChild) { + return new StWkttosql(source(), newChild); + } + + @Override + protected TypeResolution resolveType() { + if (field().dataType().isString()) { + return TypeResolution.TYPE_RESOLVED; + } + return isString(field(), functionName(), Expressions.ParamOrdinal.DEFAULT); + } + + @Override + protected Processor makeProcessor() { + return StWkttosqlProcessor.INSTANCE; + } + + @Override + public DataType dataType() { + return DataType.GEO_SHAPE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StWkttosql::new, field()); + } + + @Override + public String processScript(String script) { + return Scripts.formatTemplate(Scripts.SQL_SCRIPTS + ".stWktToSql(" + script + ")"); + } + + @Override + public Object fold() { + return StWkttosqlProcessor.INSTANCE.process(field().fold()); + } + +} diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java new file mode 100644 index 0000000000000..f17ee2315befe --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; + +public class StWkttosqlProcessor implements Processor { + + static final StWkttosqlProcessor INSTANCE = new StWkttosqlProcessor(); + + public static final String NAME = "geo_wkttosql"; + + StWkttosqlProcessor() { + } + + public StWkttosqlProcessor(StreamInput in) throws IOException { + } + + @Override + public Object process(Object input) { + return StWkttosqlProcessor.apply(input); + } + + public static GeoShape apply(Object input) { + if (input == null) { + return null; + } + + if ((input instanceof String) == false) { + throw new SqlIllegalArgumentException("A string is required; received [{}]", input); + } + try { + return new GeoShape(input); + } catch (IOException | IllegalArgumentException | ElasticsearchParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse [{}] as a geo_shape value", input); + } + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return 0; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StX.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StX.java new file mode 100644 index 0000000000000..f3cdafbe70dab --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StX.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_X function that takes a geometry and returns the X coordinate of its first point + */ +public class StX extends UnaryGeoFunction { + + public StX(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StX::new, field()); + } + + @Override + protected StX replaceChild(Expression newChild) { + return new StX(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.X; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StY.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StY.java new file mode 100644 index 0000000000000..0a9bc3aa1a40b --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StY.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_Y function that takes a geometry and returns the Y coordinate of its first point + */ +public class StY extends UnaryGeoFunction { + + public StY(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StY::new, field()); + } + + @Override + protected StY replaceChild(Expression newChild) { + return new StY(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.Y; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StZ.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StZ.java new file mode 100644 index 0000000000000..b6c0c9466bbe1 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StZ.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_Z function that takes a geometry and returns the Z coordinate of its first point + */ +public class StZ extends UnaryGeoFunction { + + public StZ(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StZ::new, field()); + } + + @Override + protected StZ replaceChild(Expression newChild) { + return new StZ(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.Z; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java new file mode 100644 index 0000000000000..50c05b7fbedb7 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.util.StringUtils; + +import java.util.Locale; +import java.util.Objects; + +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isGeo; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +/** + * Base class for functions that get a single geo shape or geo point as an argument + */ +public abstract class UnaryGeoFunction extends UnaryScalarFunction { + + protected UnaryGeoFunction(Source source, Expression field) { + super(source, field); + } + + @Override + public Object fold() { + return operation().apply(field().fold()); + } + + @Override + protected TypeResolution resolveType() { + if (!childrenResolved()) { + return new TypeResolution("Unresolved children"); + } + return isGeo(field(), operation().toString(), Expressions.ParamOrdinal.DEFAULT); + } + + @Override + protected Processor makeProcessor() { + return new GeoProcessor(operation()); + } + + protected abstract GeoProcessor.GeoOperation operation(); + + @Override + public ScriptTemplate scriptWithField(FieldAttribute field) { + //TODO change this to use _source instead of the exact form (aka field.keyword for geo shape fields) + return new ScriptTemplate(processScript("{sql}.geoDocValue(doc,{})"), + 
paramsBuilder().variable(field.exactAttribute().name()).build(), + dataType()); + } + + @Override + public String processScript(String template) { + // basically, transform the script to InternalSqlScriptUtils.[function_name](other_function_or_field_name) + return super.processScript( + format(Locale.ROOT, "{sql}.%s(%s)", + StringUtils.underscoreToLowerCamelCase("ST_" + operation().name()), + template)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + UnaryGeoFunction other = (UnaryGeoFunction) obj; + return Objects.equals(other.field(), field()); + } + + @Override + public int hashCode() { + return Objects.hash(field()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 6a4ec411fe1cf..d39aec4423684 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression.function.scalar.whitelist; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.script.JodaCompatibleZonedDateTime; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -12,6 +13,10 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor.BinaryOptionalMathOperation; @@ -73,7 +78,7 @@ public static Object docValue(Map> doc, String fi } return null; } - + public static boolean nullSafeFilter(Boolean filter) { return filter == null ? 
false : filter.booleanValue(); } @@ -109,7 +114,7 @@ public static Boolean neq(Object left, Object right) { public static Boolean lt(Object left, Object right) { return BinaryComparisonOperation.LT.apply(left, right); } - + public static Boolean lte(Object left, Object right) { return BinaryComparisonOperation.LTE.apply(left, right); } @@ -125,7 +130,7 @@ public static Boolean gte(Object left, Object right) { public static Boolean and(Boolean left, Boolean right) { return BinaryLogicOperation.AND.apply(left, right); } - + public static Boolean or(Boolean left, Boolean right) { return BinaryLogicOperation.OR.apply(left, right); } @@ -328,14 +333,14 @@ public static Integer dateTimeChrono(Object dateTime, String tzId, String chrono } return DateTimeFunction.dateTimeChrono(asDateTime(dateTime), tzId, chronoName); } - + public static String dayName(Object dateTime, String tzId) { if (dateTime == null || tzId == null) { return null; } return NameExtractor.DAY_NAME.extract(asDateTime(dateTime), tzId); } - + public static Integer dayOfWeek(Object dateTime, String tzId) { if (dateTime == null || tzId == null) { return null; @@ -349,7 +354,7 @@ public static String monthName(Object dateTime, String tzId) { } return NameExtractor.MONTH_NAME.extract(asDateTime(dateTime), tzId); } - + public static Integer quarter(Object dateTime, String tzId) { if (dateTime == null || tzId == null) { return null; @@ -390,7 +395,7 @@ private static Object asDateTime(Object dateTime, boolean lenient) { } return dateTime; } - + public static IntervalDayTime intervalDayTime(String text, String typeName) { if (text == null || typeName == null) { return null; @@ -416,7 +421,7 @@ public static OffsetTime asTime(String time) { public static Integer ascii(String s) { return (Integer) StringOperation.ASCII.apply(s); } - + public static Integer bitLength(String s) { return (Integer) StringOperation.BIT_LENGTH.apply(s); } @@ -428,7 +433,7 @@ public static String character(Number n) { public static Integer charLength(String s) { return (Integer) StringOperation.CHAR_LENGTH.apply(s); } - + public static String concat(String s1, String s2) { return (String) ConcatFunctionProcessor.process(s1, s2); } @@ -452,7 +457,7 @@ public static Integer length(String s) { public static Integer locate(String s1, String s2) { return locate(s1, s2, null); } - + public static Integer locate(String s1, String s2, Number pos) { return LocateFunctionProcessor.doProcess(s1, s2, pos); } @@ -460,7 +465,7 @@ public static Integer locate(String s1, String s2, Number pos) { public static String ltrim(String s) { return (String) StringOperation.LTRIM.apply(s); } - + public static Integer octetLength(String s) { return (Integer) StringOperation.OCTET_LENGTH.apply(s); } @@ -468,15 +473,15 @@ public static Integer octetLength(String s) { public static Integer position(String s1, String s2) { return (Integer) BinaryStringStringOperation.POSITION.apply(s1, s2); } - + public static String repeat(String s, Number count) { return BinaryStringNumericOperation.REPEAT.apply(s, count); } - + public static String replace(String s1, String s2, String s3) { return (String) ReplaceFunctionProcessor.doProcess(s1, s2, s3); } - + public static String right(String s, Number count) { return BinaryStringNumericOperation.RIGHT.apply(s, count); } @@ -496,7 +501,47 @@ public static String substring(String s, Number start, Number length) { public static String ucase(String s) { return (String) StringOperation.UCASE.apply(s); } - + + public static String stAswkt(Object v) { + return 
GeoProcessor.GeoOperation.ASWKT.apply(v).toString(); + } + + public static GeoShape stWktToSql(String wktString) { + return StWkttosqlProcessor.apply(wktString); + } + + public static Double stDistance(Object v1, Object v2) { + return StDistanceProcessor.process(v1, v2); + } + + public static String stGeometryType(Object g) { + return (String) GeoProcessor.GeoOperation.GEOMETRY_TYPE.apply(g); + } + + public static Double stX(Object g) { + return (Double) GeoProcessor.GeoOperation.X.apply(g); + } + + public static Double stY(Object g) { + return (Double) GeoProcessor.GeoOperation.Y.apply(g); + } + + public static Double stZ(Object g) { + return (Double) GeoProcessor.GeoOperation.Z.apply(g); + } + + // processes doc value as a geometry + public static GeoShape geoDocValue(Map> doc, String fieldName) { + Object obj = docValue(doc, fieldName); + if (obj != null) { + if (obj instanceof GeoPoint) { + return new GeoShape(((GeoPoint) obj).getLon(), ((GeoPoint) obj).getLat()); + } + // TODO: Add support for geo_shapes when it is there + } + return null; + } + // // Casting // diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java index b24ec56727d64..223e22b2a33ba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.grouping.GroupingFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime; import org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth; import org.elasticsearch.xpack.sql.type.DataType; @@ -95,6 +96,13 @@ default ScriptTemplate scriptWithFoldable(Expression foldable) { dataType()); } + if (fold instanceof GeoShape) { + GeoShape geoShape = (GeoShape) fold; + return new ScriptTemplate(processScript("{sql}.stWktToSql({})"), + paramsBuilder().variable(geoShape.toString()).build(), + dataType()); + } + return new ScriptTemplate(processScript("{}"), paramsBuilder().variable(fold).build(), dataType()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java index b06a1fb887433..ed7dc9da77543 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java @@ -408,5 +408,4 @@ public static TemporalAmount negate(TemporalAmount interval) { public static TemporalAmount parseInterval(Source source, String value, DataType intervalType) { return PARSERS.get(intervalType).parse(source, value); } - } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java index 333ba3f11c0b1..d6bdeeb0fe46b 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.literal; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import java.util.ArrayList; import java.util.Collection; @@ -30,6 +31,7 @@ public static Collection getNamedWriteab entries.add(new NamedWriteableRegistry.Entry(IntervalDayTime.class, IntervalDayTime.NAME, IntervalDayTime::new)); entries.add(new NamedWriteableRegistry.Entry(IntervalYearMonth.class, IntervalYearMonth.NAME, IntervalYearMonth::new)); + entries.add(new NamedWriteableRegistry.Entry(GeoShape.class, GeoShape.NAME, GeoShape::new)); return entries; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 8495b0269eb84..7e5516810d92a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.sql.planner; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.Point; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Attribute; @@ -38,6 +40,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.literal.Intervals; import org.elasticsearch.xpack.sql.expression.predicate.Range; @@ -85,6 +89,7 @@ import org.elasticsearch.xpack.sql.querydsl.agg.TopHitsAgg; import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery; import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery; +import org.elasticsearch.xpack.sql.querydsl.query.GeoDistanceQuery; import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery; import org.elasticsearch.xpack.sql.querydsl.query.MultiMatchQuery; import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery; @@ -656,6 +661,24 @@ private static Query translateQuery(BinaryComparison bc) { Object value = valueOf(bc.right()); String format = dateFormat(bc.left()); + // Possible geo optimization + if (bc.left() instanceof StDistance && value instanceof Number) { + if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { + // Special case for ST_Distance translatable into geo_distance query + StDistance stDistance = (StDistance) bc.left(); + if (stDistance.left() instanceof FieldAttribute && stDistance.right().foldable()) { + Object geoShape = valueOf(stDistance.right()); + if (geoShape instanceof GeoShape) { + Geometry geometry = ((GeoShape) geoShape).toGeometry(); + if (geometry instanceof Point) { + String field = nameOf(stDistance.left()); + return new GeoDistanceQuery(source, field, ((Number) value).doubleValue(), 
+ ((Point) geometry).getLat(), ((Point) geometry).getLon()); + } + } + } + } + } if (bc instanceof GreaterThan) { return new RangeQuery(source, name, value, false, null, false, format); } @@ -954,6 +977,9 @@ public QueryTranslation translate(Expression exp, boolean onAggs) { protected static Query handleQuery(ScalarFunction sf, Expression field, Supplier query) { Query q = query.get(); + if (field instanceof StDistance && q instanceof GeoDistanceQuery) { + return wrapIfNested(q, ((StDistance) field).left()); + } if (field instanceof FieldAttribute) { return wrapIfNested(q, field); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/GeoDistanceQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/GeoDistanceQuery.java new file mode 100644 index 0000000000000..dd1a1171c1603 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/GeoDistanceQuery.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.querydsl.query; + +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.util.Objects; + +public class GeoDistanceQuery extends LeafQuery { + + private final String field; + private final double lat; + private final double lon; + private final double distance; + + public GeoDistanceQuery(Source source, String field, double distance, double lat, double lon) { + super(source); + this.field = field; + this.distance = distance; + this.lat = lat; + this.lon = lon; + } + + public String field() { + return field; + } + + public double lat() { + return lat; + } + + public double lon() { + return lon; + } + + public double distance() { + return distance; + } + + @Override + public QueryBuilder asBuilder() { + return QueryBuilders.geoDistanceQuery(field).distance(distance, DistanceUnit.METERS).point(lat, lon); + } + + @Override + public int hashCode() { + return Objects.hash(field, distance, lat, lon); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + GeoDistanceQuery other = (GeoDistanceQuery) obj; + return Objects.equals(field, other.field) && + Objects.equals(distance, other.distance) && + Objects.equals(lat, other.lat) && + Objects.equals(lon, other.lon); + } + + @Override + protected String innerToString() { + return field + ":" + "(" + distance + "," + "(" + lat + ", " + lon + "))"; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 1f04e7c8e1982..76f2436e8629c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -53,6 +53,9 @@ public enum DataType { // // specialized types // + GEO_SHAPE( ExtTypes.GEOMETRY, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false), + // display size = 2 doubles + len("POINT( )") + GEO_POINT( ExtTypes.GEOMETRY, Double.BYTES*2, Integer.MAX_VALUE, 25 * 
2 + 8, false, false, false), // IP can be v4 or v6. The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456 // aka 39 chars IP( "ip", JDBCType.VARCHAR, 39, 39, 0, false, false, true), @@ -251,6 +254,10 @@ public boolean isPrimitive() { return this != OBJECT && this != NESTED && this != UNSUPPORTED; } + public boolean isGeo() { + return this == GEO_POINT || this == GEO_SHAPE; + } + public boolean isDateBased() { return this == DATE || this == DATETIME; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index dcd6a1b35a13e..3f985ae4e3b6e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.type; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.expression.literal.Interval; import java.time.OffsetTime; @@ -81,6 +82,9 @@ public static DataType fromJava(Object value) { if (value instanceof Interval) { return ((Interval) value).dataType(); } + if (value instanceof GeoShape) { + return DataType.GEO_SHAPE; + } throw new SqlIllegalArgumentException("No idea what's the DataType for {}", value.getClass()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java index 1ad9dd92abfec..2c07be3eb620d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java @@ -27,7 +27,8 @@ enum ExtTypes implements SQLType { INTERVAL_DAY_TO_SECOND(110), INTERVAL_HOUR_TO_MINUTE(111), INTERVAL_HOUR_TO_SECOND(112), - INTERVAL_MINUTE_TO_SECOND(113); + INTERVAL_MINUTE_TO_SECOND(113), + GEOMETRY(114); private final Integer type; diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 4ac4632572ca0..6d24ea79f2bc2 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -4,7 +4,14 @@ # you may not use this file except in compliance with the Elastic License. 
# -# This file contains a whitelist for SQL specific utilities available inside SQL scripting +# This file contains a whitelist for SQL specific utilities and classes available inside SQL scripting + +#### Classes + +class org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape { + +} + class org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime { } @@ -137,7 +144,19 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String space(Number) String substring(String, Number, Number) String ucase(String) - + +# +# Geo Functions +# + GeoShape geoDocValue(java.util.Map, String) + String stAswkt(Object) + Double stDistance(Object, Object) + String stGeometryType(Object) + GeoShape stWktToSql(String) + Double stX(Object) + Double stY(Object) + Double stZ(Object) + # # Casting # diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java index bc7b85b5392e9..b36111ffac3bb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java @@ -158,7 +158,7 @@ public void testDottedFieldPathTypo() { public void testStarExpansionExcludesObjectAndUnsupportedTypes() { LogicalPlan plan = plan("SELECT * FROM test"); List list = ((Project) plan).projections(); - assertThat(list, hasSize(8)); + assertThat(list, hasSize(10)); List names = Expressions.names(list); assertThat(names, not(hasItem("some"))); assertThat(names, not(hasItem("some.dotted"))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index dcf8dad5ecb79..609e6a52c3e0f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -773,4 +773,28 @@ public void testAggregateAliasInFilter() { public void testProjectUnresolvedAliasInFilter() { assertEquals("1:8: Unknown column [tni]", error("SELECT tni AS i FROM test WHERE i > 10 GROUP BY i")); } + + public void testGeoShapeInWhereClause() { + assertEquals("1:49: geo shapes cannot be used for filtering", + error("SELECT ST_AsWKT(shape) FROM test WHERE ST_AsWKT(shape) = 'point (10 20)'")); + + // We get only one message back because the messages are grouped by the node that caused the issue + assertEquals("1:46: geo shapes cannot be used for filtering", + error("SELECT MAX(ST_X(shape)) FROM test WHERE ST_Y(shape) > 10 GROUP BY ST_GEOMETRYTYPE(shape) ORDER BY ST_ASWKT(shape)")); + } + + public void testGeoShapeInGroupBy() { + assertEquals("1:44: geo shapes cannot be used in grouping", + error("SELECT ST_X(shape) FROM test GROUP BY ST_X(shape)")); + } + + public void testGeoShapeInOrderBy() { + assertEquals("1:44: geo shapes cannot be used for sorting", + error("SELECT ST_X(shape) FROM test ORDER BY ST_Z(shape)")); + } + + public void testGeoShapeInSelect() { + accept("SELECT ST_X(shape) FROM test"); + } + } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 973d5b50fad00..50a3b185dba86 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.SqlException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.DateUtils; @@ -451,6 +452,125 @@ public void testObjectsForSourceValue() throws IOException { assertThat(ex.getMessage(), is("Objects (returned by [" + fieldName + "]) are not supported")); } + public void testGeoShapeExtraction() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false); + Map map = new HashMap<>(); + map.put(fieldName, "POINT (1 2)"); + assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); + + map = new HashMap<>(); + assertNull(fe.extractFromSource(map)); + } + + + public void testMultipleGeoShapeExtraction() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false); + Map map = new HashMap<>(); + map.put(fieldName, "POINT (1 2)"); + assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); + + map = new HashMap<>(); + assertNull(fe.extractFromSource(map)); + + Map map2 = new HashMap<>(); + map2.put(fieldName, Arrays.asList("POINT (1 2)", "POINT (3 4)")); + SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map2)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false, true); + assertEquals(new GeoShape(1, 2), lenientFe.extractFromSource(map2)); + } + + public void testGeoPointExtractionFromSource() throws IOException { + int layers = randomIntBetween(1, 3); + String pathCombined = ""; + double lat = randomDoubleBetween(-90, 90, true); + double lon = randomDoubleBetween(-180, 180, true); + SearchHit hit = new SearchHit(1); + XContentBuilder source = JsonXContent.contentBuilder(); + boolean[] arrayWrap = new boolean[layers - 1]; + source.startObject(); { + for (int i = 0; i < layers - 1; i++) { + arrayWrap[i] = randomBoolean(); + String name = randomAlphaOfLength(10); + source.field(name); + if (arrayWrap[i]) { + source.startArray(); + } + source.startObject(); + pathCombined = pathCombined + name + "."; + } + String name = randomAlphaOfLength(10); + pathCombined = pathCombined + name; + source.field(name, randomPoint(lat, lon)); + for (int i = layers - 2; i >= 0; i--) { + source.endObject(); + if (arrayWrap[i]) { + source.endArray(); + } + } + } + source.endObject(); + BytesReference sourceRef = BytesReference.bytes(source); + hit.sourceRef(sourceRef); + + FieldHitExtractor fe = new FieldHitExtractor(pathCombined, DataType.GEO_POINT, UTC, false); + assertEquals(new GeoShape(lon, lat), fe.extract(hit)); + } + + public void testMultipleGeoPointExtractionFromSource() throws IOException { + double lat = randomDoubleBetween(-90, 90, true); + double lon = randomDoubleBetween(-180, 180, true); + SearchHit hit = new 
SearchHit(1); + String fieldName = randomAlphaOfLength(5); + int arraySize = randomIntBetween(2, 4); + XContentBuilder source = JsonXContent.contentBuilder(); + source.startObject(); { + source.startArray(fieldName); + source.value(randomPoint(lat, lon)); + for (int i = 1; i < arraySize; i++) { + source.value(randomPoint(lat, lon)); + } + source.endArray(); + } + source.endObject(); + BytesReference sourceRef = BytesReference.bytes(source); + hit.sourceRef(sourceRef); + + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, false); + SqlException ex = expectThrows(SqlException.class, () -> fe.extract(hit)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, false, true); + assertEquals(new GeoShape(lon, lat), lenientFe.extract(hit)); + } + + public void testGeoPointExtractionFromDocValues() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, true); + SearchHit hit = new SearchHit(1); + DocumentField field = new DocumentField(fieldName, singletonList("2, 1")); + hit.fields(singletonMap(fieldName, field)); + assertEquals(new GeoShape(1, 2), fe.extract(hit)); + hit = new SearchHit(1); + assertNull(fe.extract(hit)); + } + + public void testGeoPointExtractionFromMultipleDocValues() { + String fieldName = randomAlphaOfLength(5); + SearchHit hit = new SearchHit(1); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, true); + + hit.fields(singletonMap(fieldName, new DocumentField(fieldName, Arrays.asList("2,1", "3,4")))); + SqlException ex = expectThrows(SqlException.class, () -> fe.extract(hit)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, true, true); + assertEquals(new GeoShape(1, 2), lenientFe.extract(hit)); + } + private FieldHitExtractor getFieldHitExtractor(String fieldName, boolean useDocValue) { return new FieldHitExtractor(fieldName, null, UTC, useDocValue); } @@ -471,4 +591,18 @@ private Object randomNonNullValue() { ESTestCase::randomDouble)); return value.get(); } + + private Object randomPoint(double lat, double lon) { + Supplier value = randomFrom(Arrays.asList( + () -> lat + "," + lon, + () -> Arrays.asList(lon, lat), + () -> { + Map map1 = new HashMap<>(); + map1.put("lat", lat); + map1.put("lon", lon); + return map1; + } + )); + return value.get(); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessorTests.java new file mode 100644 index 0000000000000..07cc6171cf013 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; + +import java.io.IOException; + +public class GeoProcessorTests extends AbstractWireSerializingTestCase { + public static GeoProcessor randomGeoProcessor() { + return new GeoProcessor(randomFrom(GeoOperation.values())); + } + + @Override + protected GeoProcessor createTestInstance() { + return randomGeoProcessor(); + } + + @Override + protected Reader instanceReader() { + return GeoProcessor::new; + } + + @Override + protected GeoProcessor mutateInstance(GeoProcessor instance) throws IOException { + return new GeoProcessor(randomValueOtherThan(instance.processor(), () -> randomFrom(GeoOperation.values()))); + } + + public void testApplyAsWKT() throws Exception { + assertEquals("point (10.0 20.0)", new GeoProcessor(GeoOperation.ASWKT).process(new GeoShape(10, 20))); + assertEquals("point (10.0 20.0)", new GeoProcessor(GeoOperation.ASWKT).process(new GeoShape("POINT (10 20)"))); + } + + public void testApplyGeometryType() throws Exception { + assertEquals("POINT", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape(10, 20))); + assertEquals("POINT", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("POINT (10 20)"))); + assertEquals("MULTIPOINT", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("multipoint (2.0 1.0)"))); + assertEquals("LINESTRING", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("LINESTRING (3.0 1.0, 4.0 2.0)"))); + assertEquals("POLYGON", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertEquals("MULTILINESTRING", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("multilinestring ((3.0 1.0, 4.0 2.0), (2.0 1.0, 5.0 6.0))"))); + assertEquals("MULTIPOLYGON", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("multipolygon (((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0)))"))); + assertEquals("ENVELOPE", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("bbox (10.0, 20.0, 40.0, 30.0)"))); + assertEquals("GEOMETRYCOLLECTION", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("geometrycollection (point (20.0 10.0),point (1.0 2.0))"))); + } + + + public void testApplyGetXYZ() throws Exception { + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape(10, 20))); + assertEquals(20.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape(10, 20))); + assertNull(new GeoProcessor(GeoOperation.Z).process(new GeoShape(10, 20))); + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("POINT (10 20)"))); + assertEquals(20.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("POINT (10 20)"))); + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("POINT (10 20 30)"))); + assertEquals(20.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("POINT (10 20 30)"))); + assertEquals(30.0, new GeoProcessor(GeoOperation.Z).process(new GeoShape("POINT (10 20 30)"))); + assertEquals(2.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("multipoint (2.0 1.0)"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("multipoint (2.0 1.0)"))); + 
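// for linestrings and multilinestrings the first point is the first declared vertex, so X/Y are taken from it +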
assertEquals(3.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("LINESTRING (3.0 1.0, 4.0 2.0)"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("LINESTRING (3.0 1.0, 4.0 2.0)"))); + assertEquals(3.0, new GeoProcessor(GeoOperation.X).process( + new GeoShape("multilinestring ((3.0 1.0, 4.0 2.0), (2.0 1.0, 5.0 6.0))"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process( + new GeoShape("multilinestring ((3.0 1.0, 4.0 2.0), (2.0 1.0, 5.0 6.0))"))); + // minX minX, maxX, maxY, minY + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("bbox (10.0, 20.0, 40.0, 30.0)"))); + // minY minX, maxX, maxY, minY + assertEquals(30.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("bbox (10.0, 20.0, 40.0, 30.0)"))); + assertEquals(20.0, new GeoProcessor(GeoOperation.X).process( + new GeoShape("geometrycollection (point (20.0 10.0),point (1.0 2.0))"))); + assertEquals(10.0, new GeoProcessor(GeoOperation.Y).process( + new GeoShape("geometrycollection (point (20.0 10.0),point (1.0 2.0))"))); + } + + public void testApplyGetXYZToPolygons() throws Exception { + assertEquals(3.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertNull(new GeoProcessor(GeoOperation.Z).process(new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertEquals(5.0, new GeoProcessor(GeoOperation.Z).process( + new GeoShape("polygon ((3.0 1.0 5.0, 4.0 2.0 6.0, 4.0 3.0 7.0, 3.0 1.0 5.0))"))); + assertEquals(3.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("multipolygon (((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0)))"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("multipolygon (((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0)))"))); + } + + public void testApplyNull() { + for (GeoOperation op : GeoOperation.values()) { + GeoProcessor proc = new GeoProcessor(op); + assertNull(proc.process(null)); + } + } + + public void testTypeCheck() { + GeoProcessor proc = new GeoProcessor(GeoOperation.ASWKT); + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, () -> proc.process("string")); + assertEquals("A geo_point or geo_shape is required; received [string]", siae.getMessage()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessorTests.java new file mode 100644 index 0000000000000..9f78f8b3df43b --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessorTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; +import static org.hamcrest.Matchers.instanceOf; + +public class StDistanceProcessorTests extends AbstractWireSerializingTestCase { + + public StDistanceProcessor createTestInstance() { + return new StDistanceProcessor( + constantPoint(randomDoubleBetween(-180, 180, true), randomDoubleBetween(-90, 90, true)), + constantPoint(randomDoubleBetween(-180, 180, true), randomDoubleBetween(-90, 90, true)) + ); + } + + public static Processor constantPoint(double lon, double lat) { + return new ChainingProcessor(new ConstantProcessor("point (" + lon + " " + lat + ")"), StWkttosqlProcessor.INSTANCE); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Processors.getNamedWriteables()); + } + + public void testApply() { + StDistanceProcessor proc = new StDistanceProcessor(constantPoint(10, 20), constantPoint(30, 40)); + Object result = proc.process(null); + assertThat(result, instanceOf(Double.class)); + assertEquals(GeoUtils.arcDistance(20, 10, 40, 30), (double) result, 0.000001); + } + + public void testNullHandling() { + assertNull(new StDistance(EMPTY, l(new GeoShape(1, 2)), l(null)).makePipe().asProcessor().process(null)); + assertNull(new StDistance(EMPTY, l(null), l(new GeoShape(1, 2))).makePipe().asProcessor().process(null)); + } + + public void testTypeCheck() { + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, + () -> new StDistance(EMPTY, l("foo"), l(new GeoShape(1, 2))).makePipe().asProcessor().process(null)); + assertEquals("A geo_point or geo_shape with type point is required; received [foo]", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new StDistance(EMPTY, l(new GeoShape(1, 2)), l("bar")).makePipe().asProcessor().process(null)); + assertEquals("A geo_point or geo_shape with type point is required; received [bar]", siae.getMessage()); + } + + @Override + protected Writeable.Reader instanceReader() { + return StDistanceProcessor::new; + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java new file mode 100644 index 0000000000000..fc7b33ae905d7 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + +import static org.hamcrest.Matchers.instanceOf; + +public class StWkttosqlProcessorTests extends ESTestCase { + public static StWkttosqlProcessor randomStWkttosqlProcessor() { + return new StWkttosqlProcessor(); + } + + public void testApply() { + StWkttosqlProcessor proc = new StWkttosqlProcessor(); + assertNull(proc.process(null)); + Object result = proc.process("POINT (10 20)"); + assertThat(result, instanceOf(GeoShape.class)); + GeoShape geoShape = (GeoShape) result; + assertEquals("point (10.0 20.0)", geoShape.toString()); + } + + public void testTypeCheck() { + StWkttosqlProcessor procPoint = new StWkttosqlProcessor(); + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process(42)); + assertEquals("A string is required; received [42]", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("some random string")); + assertEquals("Cannot parse [some random string] as a geo_shape value", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (foo bar)")); + assertEquals("Cannot parse [point (foo bar)] as a geo_shape value", siae.getMessage()); + + + siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (10 10")); + assertEquals("Cannot parse [point (10 10] as a geo_shape value", siae.getMessage()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index cf6530e2188ff..93f6515f71062 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.IsoWeekOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ATan; @@ -764,6 +765,15 @@ public void testLiteralsOnTheRight() { assertEquals(FIVE, nullEquals.right()); } + public void testLiteralsOnTheRightInStDistance() { + Alias a = new Alias(EMPTY, "a", L(10)); + Expression result = new BooleanLiteralsOnTheRight().rule(new StDistance(EMPTY, FIVE, a)); + assertTrue(result instanceof StDistance); + StDistance sd = (StDistance) result; + assertEquals(a, sd.left()); + assertEquals(FIVE, sd.right()); + } + public void testBoolSimplifyNotIsNullAndNotIsNotNull() { BooleanSimplification simplification = new BooleanSimplification(); assertTrue(simplification.rule(new Not(EMPTY, new IsNull(EMPTY, ONE))) instanceof IsNotNull); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java index f3f2d9569c53f..9c8c32689b70e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java @@ -57,7 +57,7 @@ public void testSysColumns() { SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, randomValueOtherThanMany(Mode::isDriver, () -> randomFrom(Mode.values()))); // nested fields are ignored - assertEquals(13, rows.size()); + assertEquals(15, rows.size()); assertEquals(24, rows.get(0).size()); List row = rows.get(0); @@ -144,7 +144,7 @@ public void testSysColumnsInOdbcMode() { List> rows = new ArrayList<>(); SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, Mode.ODBC); - assertEquals(13, rows.size()); + assertEquals(15, rows.size()); assertEquals(24, rows.get(0).size()); List row = rows.get(0); @@ -233,7 +233,7 @@ public void testSysColumnsInOdbcMode() { assertEquals(Short.class, nullable(row).getClass()); assertEquals(Short.class, sqlDataType(row).getClass()); assertEquals(Short.class, sqlDataTypeSub(row).getClass()); - + row = rows.get(9); assertEquals("some.ambiguous", name(row)); assertEquals((short) Types.VARCHAR, sqlType(row)); @@ -279,7 +279,7 @@ public void testSysColumnsInJdbcMode() { List> rows = new ArrayList<>(); SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, Mode.JDBC); - assertEquals(13, rows.size()); + assertEquals(15, rows.size()); assertEquals(24, rows.get(0).size()); List row = rows.get(0); @@ -463,7 +463,7 @@ public void testSysColumnsNoArg() throws Exception { public void testSysColumnsWithCatalogWildcard() throws Exception { executeCommand("SYS COLUMNS CATALOG 'cluster' TABLE LIKE 'test' LIKE '%'", emptyList(), r -> { - assertEquals(13, r.size()); + assertEquals(14, r.size()); assertEquals(CLUSTER_NAME, r.column(0)); assertEquals("test", r.column(2)); assertEquals("bool", r.column(3)); @@ -476,7 +476,7 @@ public void testSysColumnsWithCatalogWildcard() throws Exception { public void testSysColumnsWithMissingCatalog() throws Exception { executeCommand("SYS COLUMNS TABLE LIKE 'test' LIKE '%'", emptyList(), r -> { - assertEquals(13, r.size()); + assertEquals(14, r.size()); assertEquals(CLUSTER_NAME, r.column(0)); assertEquals("test", r.column(2)); assertEquals("bool", r.column(3)); @@ -489,7 +489,7 @@ public void testSysColumnsWithMissingCatalog() throws Exception { public void testSysColumnsWithNullCatalog() throws Exception { executeCommand("SYS COLUMNS CATALOG ? 
TABLE LIKE 'test' LIKE '%'", singletonList(new SqlTypedParamValue("keyword", null)), r -> { - assertEquals(13, r.size()); + assertEquals(14, r.size()); assertEquals(CLUSTER_NAME, r.column(0)); assertEquals("test", r.column(2)); assertEquals("bool", r.column(3)); @@ -529,4 +529,4 @@ private Tuple sql(String sql, List para SqlSession session = new SqlSession(TestUtils.TEST_CFG, null, null, resolver, null, null, null, null, null); return new Tuple<>(cmd, session); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index 4a8da68a1d51e..805268dd5b687 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -48,7 +48,7 @@ public void testSysTypes() { "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", - "UNSUPPORTED", "OBJECT", "NESTED"); + "GEO_SHAPE", "GEO_POINT", "UNSUPPORTED", "OBJECT", "NESTED"); cmd.execute(null, wrap(r -> { assertEquals(19, r.columnCount()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 0543e65d4ae46..693840bd65c34 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram; import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery; import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery; +import org.elasticsearch.xpack.sql.querydsl.query.GeoDistanceQuery; import org.elasticsearch.xpack.sql.querydsl.query.NotQuery; import org.elasticsearch.xpack.sql.querydsl.query.Query; import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery; @@ -65,6 +66,7 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation.PI; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; public class QueryTranslatorTests extends ESTestCase { @@ -496,7 +498,7 @@ public void testTranslateMathFunction_HavingClause_Painless() { assertNull(translation.query); AggFilter aggFilter = translation.aggFilter; assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(InternalSqlScriptUtils." 
+ - operation.name().toLowerCase(Locale.ROOT) + "(params.a0),params.v0))", + operation.name().toLowerCase(Locale.ROOT) + "(params.a0),params.v0))", aggFilter.scriptTemplate().toString()); assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=max(int){a->")); assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=10}]")); @@ -561,6 +563,109 @@ public void testGroupByAndHavingWithFunctionOnTopOfAggregation() { assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=10}]")); } + public void testTranslateStAsWktForPoints() { + LogicalPlan p = plan("SELECT ST_AsWKT(point) FROM test WHERE ST_AsWKT(point) = 'point (10 20)'"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, true); + assertNull(translation.query); + AggFilter aggFilter = translation.aggFilter; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.eq(" + + "InternalSqlScriptUtils.stAswkt(InternalSqlScriptUtils.geoDocValue(doc,params.v0))," + + "params.v1)" + + ")", + aggFilter.scriptTemplate().toString()); + assertEquals("[{v=point}, {v=point (10 20)}]", aggFilter.scriptTemplate().params().toString()); + } + + public void testTranslateStWktToSql() { + LogicalPlan p = plan("SELECT shape FROM test WHERE ST_WKTToSQL(keyword) = ST_WKTToSQL('point (10 20)')"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, true); + assertNull(translation.query); + AggFilter aggFilter = translation.aggFilter; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(" + + "InternalSqlScriptUtils.eq(InternalSqlScriptUtils.stWktToSql(" + + "InternalSqlScriptUtils.docValue(doc,params.v0)),InternalSqlScriptUtils.stWktToSql(params.v1)))", + aggFilter.scriptTemplate().toString()); + assertEquals("[{v=keyword}, {v=point (10.0 20.0)}]", aggFilter.scriptTemplate().params().toString()); + } + + public void testTranslateStDistanceToScript() { + String operator = randomFrom(">", ">="); + String operatorFunction = operator.equalsIgnoreCase(">") ? "gt" : "gte"; + LogicalPlan p = plan("SELECT shape FROM test WHERE ST_Distance(point, ST_WKTToSQL('point (10 20)')) " + operator + " 20"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertTrue(translation.query instanceof ScriptQuery); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(" + + "InternalSqlScriptUtils." 
+ operatorFunction + "(" + + "InternalSqlScriptUtils.stDistance(" + + "InternalSqlScriptUtils.geoDocValue(doc,params.v0),InternalSqlScriptUtils.stWktToSql(params.v1)),params.v2))", + sc.script().toString()); + assertEquals("[{v=point}, {v=point (10.0 20.0)}, {v=20}]", sc.script().params().toString()); + } + + public void testTranslateStDistanceToQuery() { + String operator = randomFrom("<", "<="); + LogicalPlan p = plan("SELECT shape FROM test WHERE ST_Distance(point, ST_WKTToSQL('point (10 20)')) " + operator + " 25"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertTrue(translation.query instanceof GeoDistanceQuery); + GeoDistanceQuery gq = (GeoDistanceQuery) translation.query; + assertEquals("point", gq.field()); + assertEquals(20.0, gq.lat(), 0.00001); + assertEquals(10.0, gq.lon(), 0.00001); + assertEquals(25.0, gq.distance(), 0.00001); + } + + public void testTranslateStXY() { + String dim = randomFrom("X", "Y"); + LogicalPlan p = plan("SELECT ST_AsWKT(point) FROM test WHERE ST_" + dim + "(point) = 10"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertThat(translation.query, instanceOf(ScriptQuery.class)); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.eq(InternalSqlScriptUtils.st" + dim + "(" + + "InternalSqlScriptUtils.geoDocValue(doc,params.v0)),params.v1))", + sc.script().toString()); + assertEquals("[{v=point}, {v=10}]", sc.script().params().toString()); + } + + public void testTranslateStGeometryType() { + LogicalPlan p = plan("SELECT ST_AsWKT(point) FROM test WHERE ST_GEOMETRYTYPE(point) = 'POINT'"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertThat(translation.query, instanceOf(ScriptQuery.class)); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.eq(InternalSqlScriptUtils.stGeometryType(" + + "InternalSqlScriptUtils.geoDocValue(doc,params.v0)),params.v1))", + sc.script().toString()); + assertEquals("[{v=point}, {v=POINT}]", sc.script().params().toString()); + } + public void testTranslateCoalesce_GroupBy_Painless() { LogicalPlan p = plan("SELECT COALESCE(int, 10) FROM test GROUP BY 1"); assertTrue(p instanceof Aggregate); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index 65b491fe71a1d..997de6e2f5c53 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -170,8 +170,11 @@ public void testNestedDoc() { public void 
testGeoField() { Map mapping = loadMapping("mapping-geo.json"); - EsField dt = mapping.get("location"); - assertThat(dt.getDataType().typeName, is("unsupported")); + assertThat(mapping.size(), is(2)); + EsField gp = mapping.get("location"); + assertThat(gp.getDataType().typeName, is("geo_point")); + EsField gs = mapping.get("site"); + assertThat(gs.getDataType().typeName, is("geo_shape")); } public void testIpField() { diff --git a/x-pack/plugin/sql/src/test/resources/mapping-geo.json b/x-pack/plugin/sql/src/test/resources/mapping-geo.json index 3c958ff37edfc..e6e499ef82e83 100644 --- a/x-pack/plugin/sql/src/test/resources/mapping-geo.json +++ b/x-pack/plugin/sql/src/test/resources/mapping-geo.json @@ -2,6 +2,9 @@ "properties" : { "location" : { "type" : "geo_point" + }, + "site": { + "type" : "geo_shape" } } } diff --git a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json index d93633f7aced0..c75ecfdc845c0 100644 --- a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json +++ b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json @@ -44,6 +44,8 @@ } } }, - "foo_type" : { "type" : "foo" } + "foo_type" : { "type" : "foo" }, + "point": {"type" : "geo_point"}, + "shape": {"type" : "geo_shape"} } } diff --git a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json index 448c50e6a9f0a..e46d64a45e88f 100644 --- a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json +++ b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json @@ -6,6 +6,7 @@ "keyword" : { "type" : "keyword" }, "unsupported" : { "type" : "ip_range" }, "date" : { "type" : "date"}, + "shape": { "type" : "geo_shape" }, "some" : { "properties" : { "dotted" : {