diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 1dc8dc955f7c6..e82101896818e 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -51,4 +51,5 @@ BWC_VERSION: - "2.2.1" - "2.2.2" - "2.3.0" + - "2.3.1" - "2.4.0" diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f39a64534057..3c5554b173f04 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,25 +15,33 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Support for labels on version bump PRs, skip label support for changelog verifier ([#4391](https://github.com/opensearch-project/OpenSearch/pull/4391)) - Update previous release bwc version to 2.4.0 ([#4455](https://github.com/opensearch-project/OpenSearch/pull/4455)) - 2.3.0 release notes ([#4457](https://github.com/opensearch-project/OpenSearch/pull/4457)) - +- Added missing javadocs for `:distribution:tools` modules ([#4483](https://github.com/opensearch-project/OpenSearch/pull/4483)) +- Add BWC version 2.3.1 ([#4513](https://github.com/opensearch-project/OpenSearch/pull/4513)) +- [Segment Replication] Add snapshot and restore tests for segment replication feature ([#3993](https://github.com/opensearch-project/OpenSearch/pull/3993)) +- Added missing javadocs for `:example-plugins` modules ([#4540](https://github.com/opensearch-project/OpenSearch/pull/4540)) ### Dependencies -- Bumps `org.gradle.test-retry` from 1.4.0 to 1.4.1 -- Bumps `reactor-netty-core` from 1.0.19 to 1.0.22 +- Bumps `reactor-core` from 3.4.18 to 3.4.23 ### Dependencies -- Bumps `com.diffplug.spotless` from 6.9.1 to 6.10.0 -- Bumps `xmlbeans` from 5.1.0 to 5.1.1 -- Bumps azure-core-http-netty from 1.12.0 to 1.12.4([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) -- Bumps azure-core from 1.27.0 to 1.31.0([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) -- Bumps azure-storage-common from 12.16.0 to 12.18.0([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps `xmlbeans` from 5.1.0 to 5.1.1 ([#4354](https://github.com/opensearch-project/OpenSearch/pull/4354)) +- Bumps azure-core-http-netty from 1.12.0 to 1.12.4 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps azure-core from 1.27.0 to 1.31.0 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps azure-storage-common from 12.16.0 to 12.18.0 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps `org.gradle.test-retry` from 1.4.0 to 1.4.1 ([#4411](https://github.com/opensearch-project/OpenSearch/pull/4411)) +- Bumps `reactor-netty-core` from 1.0.19 to 1.0.22 ([#4447](https://github.com/opensearch-project/OpenSearch/pull/4447)) +- Bumps `reactive-streams` from 1.0.3 to 1.0.4 ([#4488](https://github.com/opensearch-project/OpenSearch/pull/4488)) +- Bumps `com.diffplug.spotless` from 6.10.0 to 6.11.0 ([#4547](https://github.com/opensearch-project/OpenSearch/pull/4547)) ### Changed - Dependency updates (httpcore, mockito, slf4j, httpasyncclient, commons-codec) ([#4308](https://github.com/opensearch-project/OpenSearch/pull/4308)) - Use RemoteSegmentStoreDirectory instead of RemoteDirectory ([#4240](https://github.com/opensearch-project/OpenSearch/pull/4240)) - Plugin ZIP publication groupId value is configurable ([#4156](https://github.com/opensearch-project/OpenSearch/pull/4156)) +- Weighted round-robin scheduling policy for shard coordination traffic ([#4241](https://github.com/opensearch-project/OpenSearch/pull/4241)) - Add index specific setting for remote repository 
([#4253](https://github.com/opensearch-project/OpenSearch/pull/4253)) - [Segment Replication] Update replicas to commit SegmentInfos instead of relying on SIS files from primary shards. ([#4402](https://github.com/opensearch-project/OpenSearch/pull/4402)) -- Introduce remote translog transfer support([#4480](https://github.com/opensearch-project/OpenSearch/pull/4480)) +- [CCR] Add getHistoryOperationsFromTranslog method to fetch the history snapshot from translogs ([#3948](https://github.com/opensearch-project/OpenSearch/pull/3948)) +- [Remote Store] Change behaviour in replica recovery for remote translog enabled indices ([#4318](https://github.com/opensearch-project/OpenSearch/pull/4318)) +- [Remote Store] Introduce remote translog transfer support([#4480](https://github.com/opensearch-project/OpenSearch/pull/4480)) ### Deprecated @@ -59,6 +67,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - [Segment Replication] Fix timeout issue by calculating time needed to process getSegmentFiles ([#4426](https://github.com/opensearch-project/OpenSearch/pull/4426)) - [Bug]: gradle check failing with java heap OutOfMemoryError (([#4328](https://github.com/opensearch-project/OpenSearch/ - `opensearch.bat` fails to execute when install path includes spaces ([#4362](https://github.com/opensearch-project/OpenSearch/pull/4362)) +- Getting security exception due to access denied 'java.lang.RuntimePermission' 'accessDeclaredMembers' when trying to get snapshot with S3 IRSA ([#4469](https://github.com/opensearch-project/OpenSearch/pull/4469)) +- Fixed flaky test `ResourceAwareTasksTests.testTaskIdPersistsInThreadContext` ([#4484](https://github.com/opensearch-project/OpenSearch/pull/4484)) +- Fixed the ignore_malformed setting to also ignore objects ([#4494](https://github.com/opensearch-project/OpenSearch/pull/4494)) +- Updated jackson to 2.13.4 and snakeyml to 1.32 ([#4556](https://github.com/opensearch-project/OpenSearch/pull/4556)) ### Security - CVE-2022-25857 org.yaml:snakeyaml DOS vulnerability ([#4341](https://github.com/opensearch-project/OpenSearch/pull/4341)) @@ -68,6 +80,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Github workflow for changelog verification ([#4085](https://github.com/opensearch-project/OpenSearch/pull/4085)) - Label configuration for dependabot PRs ([#4348](https://github.com/opensearch-project/OpenSearch/pull/4348)) - Added RestLayer Changes for PIT stats ([#4217](https://github.com/opensearch-project/OpenSearch/pull/4217)) +- Added GeoBounds aggregation on GeoShape field type.([#4266](https://github.com/opensearch-project/OpenSearch/pull/4266)) + - Addition of Doc values on the GeoShape Field + - Addition of GeoShape ValueSource level code interfaces for accessing the DocValues. + - Addition of Missing Value feature in the GeoShape Aggregations. 
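
  As an illustration of the new GeoBounds-on-geo_shape capability described above (not part of the PR diff itself), the sketch below shows how the aggregation can now be requested against a `geo_shape` field, including the new missing-value support. It is adapted from the integration tests added in this change; the index name `my_index`, field name `myshape`, aggregation name, and WKT literal are hypothetical, and a connected `Client` is assumed to be available.

  ```java
  import org.opensearch.action.search.SearchResponse;
  import org.opensearch.client.Client;
  import org.opensearch.common.geo.GeoPoint;
  import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
  import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder;

  public class GeoShapeBoundsExample {

      // Assumes "my_index" maps the field "myshape" as type=geo_shape.
      void geoBoundsOnGeoShape(Client client) {
          SearchResponse response = client.prepareSearch("my_index")
              .addAggregation(
                  new GeoBoundsAggregationBuilder("shape_bounds")
                      .field("myshape")          // geo_shape field, now backed by doc values
                      .wrapLongitude(false)
                      // New in this change: documents without a value fall back to this WKT shape.
                      .missing("POINT (2.0 1.0)")
              )
              .get();

          GeoBounds bounds = response.getAggregations().get("shape_bounds");
          GeoPoint topLeft = bounds.topLeft();          // north-west corner of the bounding box
          GeoPoint bottomRight = bounds.bottomRight();  // south-east corner of the bounding box
          System.out.println("bounds: " + topLeft + " -> " + bottomRight);
      }
  }
  ```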
### Changed @@ -83,4 +99,4 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) [Unreleased]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...HEAD -[2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...2.x +[2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...2.x \ No newline at end of file diff --git a/build.gradle b/build.gradle index bcae5bc3884a7..11ba3bf9fe105 100644 --- a/build.gradle +++ b/build.gradle @@ -55,7 +55,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.10.0" apply false + id "com.diffplug.spotless" version "6.11.0" apply false id "org.gradle.test-retry" version "1.4.1" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 072dcc4578977..aa6a14ca6e47d 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -9,9 +9,9 @@ bundled_jdk = 17.0.4+8 # optional dependencies spatial4j = 0.7 jts = 1.15.0 -jackson = 2.13.3 -jackson_databind = 2.13.3 -snakeyaml = 1.31 +jackson = 2.13.4 +jackson_databind = 2.13.4 +snakeyaml = 1.32 icu4j = 70.1 supercsv = 2.4.0 log4j = 2.17.1 diff --git a/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 b/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 deleted file mode 100644 index 6e0e2cf9bf2d4..0000000000000 --- a/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a27014716e4421684416e5fa83d896ddb87002da \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.13.4.jar.sha1 b/client/sniffer/licenses/jackson-core-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..c21a7ba4d0043 --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.13.4.jar.sha1 @@ -0,0 +1 @@ +0cf934c681294b97ef6d80082faeefbe1edadf56 \ No newline at end of file diff --git a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java index 725718d85b179..d02e4e98b1287 100644 --- a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java +++ b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java @@ -43,5 +43,10 @@ @Retention(RetentionPolicy.CLASS) @Target({ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE }) public @interface SuppressForbidden { + /** + * The argument to this annotation, specifying the reason a forbidden API is being used. + * + * @return The reason the error is being suppressed. + */ String reason(); } diff --git a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/package-info.java b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/package-info.java new file mode 100644 index 0000000000000..a626a125bb4c9 --- /dev/null +++ b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** + * Tools to validate minimum version of the runtime Java. + */ +package org.opensearch.tools.java_version_checker; diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java index 4789c5df416e6..7a772526cd66b 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java +++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java @@ -36,7 +36,7 @@ import org.opensearch.cli.Terminal; /** - * A cli tool for managing secrets in the opensearch keystore. + * A CLI tool for managing secrets in the OpenSearch keystore. */ public class KeyStoreCli extends LoggingAwareMultiCommand { @@ -52,6 +52,12 @@ private KeyStoreCli() { subcommands.put("has-passwd", new HasPasswordKeyStoreCommand()); } + /** + * Main entry point for the OpenSearch Keystore CLI tool. + * + * @param args CLI commands for managing secrets. + * @throws Exception if an exception was encountered executing the command. + */ public static void main(String[] args) throws Exception { exit(new KeyStoreCli().main(args, Terminal.DEFAULT)); } diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/package-info.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/package-info.java new file mode 100644 index 0000000000000..3969fb4f91e49 --- /dev/null +++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Classes implementing a CLI tool for managing secrets in the OpenSearch keystore. + */ +package org.opensearch.common.settings; diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle index 52100296ac7e6..7ebe5c7e64416 100644 --- a/distribution/tools/launchers/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -54,6 +54,5 @@ testingConventions { } javadoc.enabled = false -missingJavadoc.enabled = false loggerUsageCheck.enabled = false jarHell.enabled = false diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/package-info.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/package-info.java new file mode 100644 index 0000000000000..c77d9cab1f468 --- /dev/null +++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Classes implementing utility methods for launching JVMs. 
+ */ +package org.opensearch.tools.launchers; diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginCli.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginCli.java index fc93068ce416b..9b06235e87e86 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginCli.java +++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginCli.java @@ -42,7 +42,7 @@ import java.util.Collections; /** - * A cli tool for adding, removing and listing plugins for opensearch. + * A CLI tool for adding, removing and listing plugins for OpenSearch. */ public class PluginCli extends LoggingAwareMultiCommand { @@ -56,6 +56,12 @@ private PluginCli() { commands = Collections.unmodifiableCollection(subcommands.values()); } + /** + * Main entry point for the OpenSearch Plugin CLI tool. + * + * @param args CLI commands for managing plugins. + * @throws Exception if an exception was encountered executing the command. + */ public static void main(String[] args) throws Exception { exit(new PluginCli().main(args, Terminal.DEFAULT)); } diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginHelper.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginHelper.java index 1ef4dd9a36d1c..13d8ab62c1f8d 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginHelper.java +++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/PluginHelper.java @@ -19,6 +19,8 @@ */ public class PluginHelper { + private PluginHelper() {} + /** * Verify if a plugin exists with any folder name. * @param pluginPath the path for the plugins directory. diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/package-info.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/package-info.java new file mode 100644 index 0000000000000..b762e59ae8095 --- /dev/null +++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Classes implementing a CLI tool for managing plugins in OpenSearch. 
+ */ +package org.opensearch.plugins; diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.3.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.4.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..2e9425b8ff6db --- /dev/null +++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.4.jar.sha1 @@ -0,0 +1 @@ +858c6cc78e1f08a885b1613e1d817c829df70a6e \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.3.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.3.jar.sha1 deleted file mode 100644 index fd75028bd141f..0000000000000 --- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56deb9ea2c93a7a556b3afbedd616d342963464e \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.4.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..fcc6491d1f78d --- /dev/null +++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.4.jar.sha1 @@ -0,0 +1 @@ +98b0edfa8e4084078f10b7b356c300ded4a71491 \ No newline at end of file diff --git a/gradle/missing-javadoc.gradle b/gradle/missing-javadoc.gradle index 6b3dacd3e905a..a1fde7637796c 100644 --- a/gradle/missing-javadoc.gradle +++ b/gradle/missing-javadoc.gradle @@ -95,18 +95,6 @@ configure([ project(":client:client-benchmark-noop-api-plugin"), project(":client:rest-high-level"), project(":client:test"), - project(":distribution:tools:java-version-checker"), - project(":distribution:tools:keystore-cli"), - project(":distribution:tools:launchers"), - project(":distribution:tools:plugin-cli"), - project(":doc-tools"), - project(":example-plugins:custom-settings"), - project(":example-plugins:custom-significance-heuristic"), - project(":example-plugins:custom-suggester"), - project(":example-plugins:painless-allowlist"), - project(":example-plugins:rescore"), - project(":example-plugins:rest-handler"), - project(":example-plugins:script-expert-scoring"), project(":libs:opensearch-cli"), project(":libs:opensearch-core"), project(":libs:opensearch-dissect"), @@ -159,9 +147,7 @@ configure([ project(":plugins:store-smb"), project(":plugins:transport-nio"), project(":qa:die-with-dignity"), - project(":qa:os"), project(":qa:wildfly"), - project(":rest-api-spec"), project(":test:external-modules:test-delayed-aggs"), project(":test:fixtures:azure-fixture"), project(":test:fixtures:gcs-fixture"), diff --git a/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java b/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java index dfadf9269a097..8aca043017e32 100644 --- a/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java +++ b/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java @@ -88,6 +88,15 @@ public G get(int i) { return shapes.get(i); } + /** + * Returns a {@link List} of All {@link Geometry} present in this collection. 
+ * + * @return a {@link List} of All {@link Geometry} + */ + public List getAll() { + return shapes; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 deleted file mode 100644 index 6e0e2cf9bf2d4..0000000000000 --- a/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a27014716e4421684416e5fa83d896ddb87002da \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-core-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..c21a7ba4d0043 --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.13.4.jar.sha1 @@ -0,0 +1 @@ +0cf934c681294b97ef6d80082faeefbe1edadf56 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 deleted file mode 100644 index a1dd86f11312d..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bf43eed9de0031521107dfea41d1e5d6bf1b9639 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..f8d776d40fdb5 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.4.jar.sha1 @@ -0,0 +1 @@ +ccaf21e6a02a20cae6591a12d20bf310544cf3ee \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 deleted file mode 100644 index 864f2da02463f..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b4e03e361e2388e3a8a0b68e3b9988d3a07ee3f3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..6d4962b0b6fa9 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.13.4.jar.sha1 @@ -0,0 +1 @@ +4161a7c3914a12e7b7940ea53eb3c53e17aea91b \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 deleted file mode 100644 index ba45b6520a1d7..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9363ded5441b1fee62d5be0604035690ca759a2a \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..312bd9ae91e4e --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.4.jar.sha1 @@ -0,0 +1 @@ +3142ec201e878372d1561e64bd1a947d9e88a03d \ No newline at end of file diff --git a/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 b/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 deleted file mode 100644 index 1ac9b78b88687..0000000000000 --- a/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cf26b7b05fef01e7bec00cb88ab4feeeba743e12 \ No newline at end of file diff --git a/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 b/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 new file mode 100644 index 
0000000000000..3216ba485951a --- /dev/null +++ b/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 @@ -0,0 +1 @@ +e80612549feb5c9191c498de628c1aa80693cf0b \ No newline at end of file diff --git a/modules/geo/build.gradle b/modules/geo/build.gradle index 7f687a414e566..6b00709f08bf9 100644 --- a/modules/geo/build.gradle +++ b/modules/geo/build.gradle @@ -40,6 +40,7 @@ restResources { includeCore '_common', 'indices', 'index', 'search', 'bulk' } } + artifacts { restTests(project.file('src/yamlRestTest/resources/rest-api-spec/test')) } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java index 7dc6f2c1b89b7..31ff2ef4689bd 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java @@ -21,6 +21,9 @@ * for the test cluster on which integration tests are running. */ public abstract class GeoModulePluginIntegTestCase extends OpenSearchIntegTestCase { + + protected static final double GEOHASH_TOLERANCE = 1E-5D; + /** * Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this * geo plugin is not getting packaged in a zip, we need to load it before the tests run. diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java index 2ac73728b2dab..9bd082a6e1ffe 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java @@ -8,52 +8,149 @@ package org.opensearch.geo.search; +import org.hamcrest.MatcherAssert; +import org.junit.Before; import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.geo.GeoPoint; import org.opensearch.geo.GeoModulePluginIntegTestCase; +import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper; import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.geo.tests.common.AggregationBuilders; +import org.opensearch.geo.tests.common.RandomGeoGenerator; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.test.OpenSearchIntegTestCase; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; +/** + * Tests to validate if user specified a missingValue in the input while doing the aggregation + */ @OpenSearchIntegTestCase.SuiteScopeTestCase public class MissingValueIT extends GeoModulePluginIntegTestCase { + private static final String INDEX_NAME = "idx"; + private static final String GEO_SHAPE_FIELD_NAME = "myshape"; + private static final String GEO_SHAPE_FIELD_TYPE = "type=geo_shape"; + private static final String AGGREGATION_NAME = "bounds"; + private static final String NON_EXISTENT_FIELD = "non_existing_field"; + private static final WellKnownText WKT = WellKnownText.INSTANCE; + private static Geometry indexedGeometry; + private static GeoPoint indexedGeoPoint; + private GeoPoint bottomRight; + private GeoPoint topLeft; + @Override protected void 
setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get()); + assertAcked( + prepareCreate(INDEX_NAME).setMapping( + "date", + "type=date", + "location", + "type=geo_point", + "str", + "type=keyword", + GEO_SHAPE_FIELD_NAME, + GEO_SHAPE_FIELD_TYPE + ).get() + ); + indexedGeometry = RandomGeoGeometryGenerator.randomGeometry(random()); + indexedGeoPoint = RandomGeoGenerator.randomPoint(random()); + assert indexedGeometry != null; indexRandom( true, - client().prepareIndex("idx").setId("1").setSource(), - client().prepareIndex("idx") + client().prepareIndex(INDEX_NAME).setId("1").setSource(), + client().prepareIndex(INDEX_NAME) .setId("2") - .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2") + .setSource( + "str", + "foo", + "long", + 3L, + "double", + 5.5, + "date", + "2015-05-07", + "location", + indexedGeoPoint.toString(), + GEO_SHAPE_FIELD_NAME, + WKT.toWKT(indexedGeometry) + ) ); } + @Before + public void runBeforeEachTest() { + bottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + topLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + } + public void testUnmappedGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(AggregationBuilders.geoBounds("bounds").field("non_existing_field").missing("2,1")) + final GeoPoint missingGeoPoint = RandomGeoGenerator.randomPoint(random()); + GeoBoundsHelper.updateBoundsBottomRight(missingGeoPoint, bottomRight); + GeoBoundsHelper.updateBoundsTopLeft(missingGeoPoint, topLeft); + SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME) + .field(NON_EXISTENT_FIELD) + .wrapLongitude(false) + .missing(missingGeoPoint.toString()) + ) .get(); assertSearchResponse(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); } public void testGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(AggregationBuilders.geoBounds("bounds").field("location").missing("2,1")) + GeoBoundsHelper.updateBoundsForGeoPoint(indexedGeoPoint, topLeft, bottomRight); + final GeoPoint missingGeoPoint = RandomGeoGenerator.randomPoint(random()); + GeoBoundsHelper.updateBoundsForGeoPoint(missingGeoPoint, topLeft, bottomRight); + SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME).field("location").wrapLongitude(false).missing(missingGeoPoint.toString()) + ) .get(); assertSearchResponse(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + public void testGeoBoundsWithMissingShape() { + // create GeoBounds for the indexed Field + GeoBoundsHelper.updateBoundsForGeometry(indexedGeometry, topLeft, bottomRight); + final 
Geometry missingGeometry = RandomGeoGeometryGenerator.randomGeometry(random()); + assert missingGeometry != null; + GeoBoundsHelper.updateBoundsForGeometry(missingGeometry, topLeft, bottomRight); + final SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME) + .wrapLongitude(false) + .field(GEO_SHAPE_FIELD_NAME) + .missing(WKT.toWKT(missingGeometry)) + ) + .get(); + assertSearchResponse(response); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + public void testUnmappedGeoBoundsOnGeoShape() { + // We cannot useGeometry other than Point as for GeoBoundsAggregation as the Default Value for the + // CoreValueSourceType is GeoPoint hence we need to use Point here. + final Geometry missingGeometry = RandomGeoGeometryGenerator.randomPoint(random()); + final SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation(AggregationBuilders.geoBounds(AGGREGATION_NAME).field(NON_EXISTENT_FIELD).missing(WKT.toWKT(missingGeometry))) + .get(); + GeoBoundsHelper.updateBoundsForGeometry(missingGeometry, topLeft, bottomRight); + assertSearchResponse(response); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + private void validateResult(final GeoBounds bounds) { + MatcherAssert.assertThat(bounds.bottomRight().lat(), closeTo(bottomRight.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.bottomRight().lon(), closeTo(bottomRight.lon(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.topLeft().lat(), closeTo(topLeft.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.topLeft().lon(), closeTo(topLeft.lon(), GEOHASH_TOLERANCE)); } } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java new file mode 100644 index 0000000000000..257cc98db69fc --- /dev/null +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java @@ -0,0 +1,187 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.common; + +import org.junit.Assert; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; + +import java.util.Locale; + +/** + * A helper class for finding the geo bounds for a shape or a point. + */ +public final class GeoBoundsHelper { + + /** + * Updates the GeoBounds for the input GeoPoint in topLeft and bottomRight GeoPoints. 
+ * + * @param geoPoint {@link GeoPoint} + * @param topLeft {@link GeoPoint} + * @param bottomRight {@link GeoPoint} + */ + public static void updateBoundsForGeoPoint(final GeoPoint geoPoint, final GeoPoint topLeft, final GeoPoint bottomRight) { + updateBoundsBottomRight(geoPoint, bottomRight); + updateBoundsTopLeft(geoPoint, topLeft); + } + + /** + * Find the bottom right for a point and put it in the currentBounds param. + * + * @param geoPoint {@link GeoPoint} + * @param currentBound {@link GeoPoint} + */ + public static void updateBoundsBottomRight(final GeoPoint geoPoint, final GeoPoint currentBound) { + if (geoPoint.lat() < currentBound.lat()) { + currentBound.resetLat(geoPoint.lat()); + } + if (geoPoint.lon() > currentBound.lon()) { + currentBound.resetLon(geoPoint.lon()); + } + } + + /** + * Find the top left for a point and put it in the currentBounds param. + * + * @param geoPoint {@link GeoPoint} + * @param currentBound {@link GeoPoint} + */ + public static void updateBoundsTopLeft(final GeoPoint geoPoint, final GeoPoint currentBound) { + if (geoPoint.lat() > currentBound.lat()) { + currentBound.resetLat(geoPoint.lat()); + } + if (geoPoint.lon() < currentBound.lon()) { + currentBound.resetLon(geoPoint.lon()); + } + } + + /** + * Find the bounds for an input shape. + * + * @param geometry {@link Geometry} + * @param geoShapeTopLeft {@link GeoPoint} + * @param geoShapeBottomRight {@link GeoPoint} + */ + public static void updateBoundsForGeometry( + final Geometry geometry, + final GeoPoint geoShapeTopLeft, + final GeoPoint geoShapeBottomRight + ) { + final ShapeType shapeType = geometry.type(); + switch (shapeType) { + case POINT: + updateBoundsTopLeft((Point) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Point) geometry, geoShapeBottomRight); + return; + case MULTIPOINT: + ((MultiPoint) geometry).getAll().forEach(p -> updateBoundsTopLeft(p, geoShapeTopLeft)); + ((MultiPoint) geometry).getAll().forEach(p -> updateBoundsBottomRight(p, geoShapeBottomRight)); + return; + case POLYGON: + updateBoundsTopLeft((Polygon) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Polygon) geometry, geoShapeBottomRight); + return; + case LINESTRING: + updateBoundsTopLeft((Line) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Line) geometry, geoShapeBottomRight); + return; + case MULTIPOLYGON: + ((MultiPolygon) geometry).getAll().forEach(p -> updateBoundsTopLeft(p, geoShapeTopLeft)); + ((MultiPolygon) geometry).getAll().forEach(p -> updateBoundsBottomRight(p, geoShapeBottomRight)); + return; + case GEOMETRYCOLLECTION: + ((GeometryCollection) geometry).getAll() + .forEach(geo -> updateBoundsForGeometry(geo, geoShapeTopLeft, geoShapeBottomRight)); + return; + case MULTILINESTRING: + ((MultiLine) geometry).getAll().forEach(line -> updateBoundsTopLeft(line, geoShapeTopLeft)); + ((MultiLine) geometry).getAll().forEach(line -> updateBoundsBottomRight(line, geoShapeBottomRight)); + return; + case ENVELOPE: + updateBoundsTopLeft((Rectangle) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Rectangle) geometry, geoShapeBottomRight); + return; + default: + Assert.fail(String.format(Locale.ROOT, "The shape type %s is not supported", shapeType)); + } + } + + private static void updateBoundsTopLeft(final Point p, final GeoPoint currentBound) { + final GeoPoint geoPoint = new GeoPoint(p.getLat(), p.getLon()); + updateBoundsTopLeft(geoPoint, currentBound); + } + + private static void updateBoundsTopLeft(final Polygon polygon, final GeoPoint currentBound) { + for (int i = 0; i < 
polygon.getPolygon().length(); i++) { + double lat = polygon.getPolygon().getLats()[i]; + double lon = polygon.getPolygon().getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsTopLeft(geoPoint, currentBound); + } + } + + private static void updateBoundsTopLeft(final Line line, final GeoPoint currentBound) { + for (int i = 0; i < line.length(); i++) { + double lat = line.getLats()[i]; + double lon = line.getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsTopLeft(geoPoint, currentBound); + } + } + + private static void updateBoundsTopLeft(final Rectangle rectangle, final GeoPoint currentBound) { + if (rectangle.getMaxLat() > currentBound.lat()) { + currentBound.resetLat(rectangle.getMaxLat()); + } + if (rectangle.getMinLon() < currentBound.lon()) { + currentBound.resetLon(rectangle.getMinLon()); + } + } + + private static void updateBoundsBottomRight(final Point p, final GeoPoint currentBound) { + final GeoPoint geoPoint = new GeoPoint(p.getLat(), p.getLon()); + updateBoundsBottomRight(geoPoint, currentBound); + } + + private static void updateBoundsBottomRight(final Polygon polygon, final GeoPoint currentBound) { + for (int i = 0; i < polygon.getPolygon().length(); i++) { + double lat = polygon.getPolygon().getLats()[i]; + double lon = polygon.getPolygon().getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsBottomRight(geoPoint, currentBound); + } + } + + private static void updateBoundsBottomRight(final Line line, final GeoPoint currentBound) { + for (int i = 0; i < line.length(); i++) { + double lat = line.getLats()[i]; + double lon = line.getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsBottomRight(geoPoint, currentBound); + } + } + + private static void updateBoundsBottomRight(final Rectangle rectangle, final GeoPoint currentBound) { + if (rectangle.getMinLat() < currentBound.lat()) { + currentBound.resetLat(rectangle.getMinLat()); + } + if (rectangle.getMaxLon() > currentBound.lon()) { + currentBound.resetLon(rectangle.getMaxLon()); + } + } +} diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java index 92987d407f51d..b6f33ec2e0cae 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java @@ -22,14 +22,20 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.geo.GeoModulePluginIntegTestCase; +import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper; import org.opensearch.geo.tests.common.RandomGeoGenerator; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Geometry; import org.opensearch.geometry.utils.Geohash; +import org.opensearch.geometry.utils.StandardValidator; +import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.search.SearchHit; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import java.util.ArrayList; import java.util.List; +import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; 
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -46,6 +52,7 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value"; protected static final String MULTI_VALUED_FIELD_NAME = "geo_values"; + protected static final String GEO_SHAPE_FIELD_NAME = "shape"; protected static final String NUMBER_FIELD_NAME = "l_values"; protected static final String UNMAPPED_IDX_NAME = "idx_unmapped"; protected static final String IDX_NAME = "idx"; @@ -57,11 +64,13 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul protected static int numDocs; protected static int numUniqueGeoPoints; protected static GeoPoint[] singleValues, multiValues; + protected static Geometry[] geoShapesValues; protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid, - unmappedCentroid; + unmappedCentroid, geoShapeTopLeft, geoShapeBottomRight; protected static ObjectIntMap expectedDocCountsForGeoHash = null; protected static ObjectObjectMap expectedCentroidsForGeoHash = null; - protected static final double GEOHASH_TOLERANCE = 1E-5D; + + protected static final WellKnownText WKT = new WellKnownText(true, new StandardValidator(true)); @Override public void setupSuiteScopeCluster() throws Exception { @@ -75,7 +84,9 @@ public void setupSuiteScopeCluster() throws Exception { NUMBER_FIELD_NAME, "type=long", "tag", - "type=keyword" + "type=keyword", + GEO_SHAPE_FIELD_NAME, + "type=geo_shape" ) ); @@ -83,6 +94,8 @@ public void setupSuiteScopeCluster() throws Exception { singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); multiTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); multiBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + geoShapeTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + geoShapeBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); singleCentroid = new GeoPoint(0, 0); multiCentroid = new GeoPoint(0, 0); unmappedCentroid = new GeoPoint(0, 0); @@ -95,17 +108,21 @@ public void setupSuiteScopeCluster() throws Exception { singleValues = new GeoPoint[numUniqueGeoPoints]; for (int i = 0; i < singleValues.length; i++) { singleValues[i] = RandomGeoGenerator.randomPoint(random()); - updateBoundsTopLeft(singleValues[i], singleTopLeft); - updateBoundsBottomRight(singleValues[i], singleBottomRight); + GeoBoundsHelper.updateBoundsForGeoPoint(singleValues[i], singleTopLeft, singleBottomRight); } multiValues = new GeoPoint[numUniqueGeoPoints]; for (int i = 0; i < multiValues.length; i++) { multiValues[i] = RandomGeoGenerator.randomPoint(random()); - updateBoundsTopLeft(multiValues[i], multiTopLeft); - updateBoundsBottomRight(multiValues[i], multiBottomRight); + GeoBoundsHelper.updateBoundsForGeoPoint(multiValues[i], multiTopLeft, multiBottomRight); } + geoShapesValues = new Geometry[numDocs]; + IntStream.range(0, numDocs).forEach(iterator -> { + geoShapesValues[iterator] = RandomGeoGeometryGenerator.randomGeometry(random()); + GeoBoundsHelper.updateBoundsForGeometry(geoShapesValues[iterator], geoShapeTopLeft, geoShapeBottomRight); + }); + List builders = new ArrayList<>(); GeoPoint singleVal; @@ -132,6 +149,7 @@ public void setupSuiteScopeCluster() throws Exception { .endArray() .field(NUMBER_FIELD_NAME, i) .field("tag", "tag" + i) + .field(GEO_SHAPE_FIELD_NAME, 
WKT.toWKT(geoShapesValues[i])) .endObject() ) ); @@ -147,7 +165,9 @@ public void setupSuiteScopeCluster() throws Exception { ); } - assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); + assertAcked( + prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point", GEO_SHAPE_FIELD_NAME, "type=geo_shape") + ); assertAcked( prepareCreate(DATELINE_IDX_NAME).setMapping( @@ -274,22 +294,4 @@ private GeoPoint updateHashCentroid(String hash, final GeoPoint location) { final double newLat = centroid.lat() + (location.lat() - centroid.lat()) / docCount; return centroid.reset(newLat, newLon); } - - private void updateBoundsBottomRight(GeoPoint geoPoint, GeoPoint currentBound) { - if (geoPoint.lat() < currentBound.lat()) { - currentBound.resetLat(geoPoint.lat()); - } - if (geoPoint.lon() > currentBound.lon()) { - currentBound.resetLon(geoPoint.lon()); - } - } - - private void updateBoundsTopLeft(GeoPoint geoPoint, GeoPoint currentBound) { - if (geoPoint.lat() > currentBound.lat()) { - currentBound.resetLat(geoPoint.lat()); - } - if (geoPoint.lon() < currentBound.lon()) { - currentBound.resetLon(geoPoint.lon()); - } - } } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java index 8cc82da12d69a..ed3196319faca 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.geo.search.aggregations.metrics; +import org.hamcrest.MatcherAssert; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.util.BigArray; @@ -43,18 +44,18 @@ import java.util.List; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.closeTo; +import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.search.aggregations.AggregationBuilders.global; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds; @OpenSearchIntegTestCase.SuiteScopeTestCase public class GeoBoundsITTestCase extends AbstractGeoAggregatorModulePluginTestCase { @@ -275,4 +276,36 @@ public void testSingleValuedFieldWithZeroLon() throws Exception { assertThat(bottomRight.lat(), closeTo(1.0, GEOHASH_TOLERANCE)); assertThat(bottomRight.lon(), closeTo(0.0, GEOHASH_TOLERANCE)); } + + public void testGeoShapeValuedField() { + final SearchResponse response = client().prepareSearch(IDX_NAME) + .addAggregation(geoBounds(aggName).field(GEO_SHAPE_FIELD_NAME).wrapLongitude(false)) + .get(); + assertSearchResponse(response); + 
final GeoBounds geoBounds = response.getAggregations().get(aggName); + MatcherAssert.assertThat(geoBounds, notNullValue()); + MatcherAssert.assertThat(geoBounds.getName(), equalTo(aggName)); + final GeoPoint topLeft = geoBounds.topLeft(); + final GeoPoint bottomRight = geoBounds.bottomRight(); + MatcherAssert.assertThat(topLeft.lat(), closeTo(geoShapeTopLeft.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(topLeft.lon(), closeTo(geoShapeTopLeft.lon(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bottomRight.lat(), closeTo(geoShapeBottomRight.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bottomRight.lon(), closeTo(geoShapeBottomRight.lon(), GEOHASH_TOLERANCE)); + } + + public void testEmptyAggregationOnGeoShapes() { + final SearchResponse searchResponse = client().prepareSearch(EMPTY_IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(geoBounds(aggName).field(GEO_SHAPE_FIELD_NAME).wrapLongitude(false)) + .get(); + + MatcherAssert.assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + final GeoBounds geoBounds = searchResponse.getAggregations().get(aggName); + MatcherAssert.assertThat(geoBounds, notNullValue()); + MatcherAssert.assertThat(geoBounds.getName(), equalTo(aggName)); + final GeoPoint topLeft = geoBounds.topLeft(); + final GeoPoint bottomRight = geoBounds.bottomRight(); + MatcherAssert.assertThat(topLeft, equalTo(null)); + MatcherAssert.assertThat(bottomRight, equalTo(null)); + } } diff --git a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java index 25dcf8db2c407..77abba7f54677 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java +++ b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java @@ -40,6 +40,7 @@ import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoTileGrid; import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.opensearch.geo.search.aggregations.metrics.GeoBoundsGeoShapeAggregator; import org.opensearch.geo.search.aggregations.metrics.InternalGeoBounds; import org.opensearch.index.mapper.GeoShapeFieldMapper; import org.opensearch.index.mapper.Mapper; @@ -47,10 +48,13 @@ import org.opensearch.plugins.Plugin; import org.opensearch.plugins.SearchPlugin; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.opensearch.search.aggregations.support.CoreValuesSourceType; +import org.opensearch.search.aggregations.support.ValuesSourceRegistry; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Consumer; public class GeoModulePlugin extends Plugin implements MapperPlugin, SearchPlugin { @@ -102,4 +106,23 @@ public List getCompositeAggregations() { ) ); } + + /** + * Registering the GeoBounds Aggregation on the GeoShape Field. This function allows plugins to register new + * aggregations using aggregation names that are already defined in Core, as long as the new aggregations target + * different ValuesSourceTypes. 
+ * + * @return A list of the new registrar functions + */ + @Override + public List> getAggregationExtentions() { + final Consumer geoShapeConsumer = builder -> builder.register( + GeoBoundsAggregationBuilder.REGISTRY_KEY, + CoreValuesSourceType.GEO_SHAPE, + GeoBoundsGeoShapeAggregator::new, + true + ); + return Collections.singletonList(geoShapeConsumer); + } + } diff --git a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java new file mode 100644 index 0000000000000..246ece4342cff --- /dev/null +++ b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java @@ -0,0 +1,190 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.algorithm; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.util.CollectionUtils; + +import java.awt.geom.Point2D; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Random; +import java.util.stream.IntStream; + +/** + * Helper class to generate a polygon. Keeping this in the src folder so that GeoSpatial plugin can take advantage of + * this helper to create the Polygons, rather than hardcoding the values. + */ +public class PolygonGenerator { + + private static final Logger LOG = LogManager.getLogger(PolygonGenerator.class); + + /** + * A helper function to create the Polygons for testing. The returned list of double array where first element + * contains all the X points and second contains all the Y points. + * + * @param xPool a {@link java.util.List} of {@link Double} + * @param yPool a {@link java.util.List} of {@link Double} + * @return a {@link List} of double array. + */ + public static List generatePolygon(final List xPool, final List yPool, final Random random) { + if (CollectionUtils.isEmpty(xPool) || CollectionUtils.isEmpty(yPool)) { + LOG.debug("One of the X or Y list is empty or null. X.size : {} Y.size : {}", xPool, yPool); + return Collections.emptyList(); + } + final List generatedPolygonPointsList = ValtrAlgorithm.generateRandomConvexPolygon(xPool, yPool, random); + final double[] x = new double[generatedPolygonPointsList.size()]; + final double[] y = new double[generatedPolygonPointsList.size()]; + IntStream.range(0, generatedPolygonPointsList.size()).forEach(iterator -> { + x[iterator] = generatedPolygonPointsList.get(iterator).getX(); + y[iterator] = generatedPolygonPointsList.get(iterator).getY(); + }); + final List pointsList = new ArrayList<>(); + pointsList.add(x); + pointsList.add(y); + return pointsList; + } + + /* + * MIT License + * + * Copyright (c) 2017 Sander Verdonschot + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + /** + * Provides a helper function to create a Polygon with a list of points. This source code is used to create the + * polygons in the test cases. + * Reference Link + * Visual Link + */ + private static class ValtrAlgorithm { + /** + * Generates a convex polygon using the points provided as a {@link List} of {@link Double} for both X and Y axis. + * + * @param xPool a {@link List} of {@link Double} + * @param yPool a {@link List} of {@link Double} + * @return a {@link List} of {@link Point2D.Double} + */ + private static List generateRandomConvexPolygon( + final List xPool, + final List yPool, + final Random random + ) { + final int n = xPool.size(); + // Sort them + Collections.sort(xPool); + Collections.sort(yPool); + + // Isolate the extreme points + final Double minX = xPool.get(0); + final Double maxX = xPool.get(n - 1); + final Double minY = yPool.get(0); + final Double maxY = yPool.get(n - 1); + + // Divide the interior points into two chains & Extract the vector components + java.util.List xVec = new ArrayList<>(n); + java.util.List yVec = new ArrayList<>(n); + + double lastTop = minX, lastBot = minX; + + for (int i = 1; i < n - 1; i++) { + double x = xPool.get(i); + + if (random.nextBoolean()) { + xVec.add(x - lastTop); + lastTop = x; + } else { + xVec.add(lastBot - x); + lastBot = x; + } + } + + xVec.add(maxX - lastTop); + xVec.add(lastBot - maxX); + + double lastLeft = minY, lastRight = minY; + + for (int i = 1; i < n - 1; i++) { + double y = yPool.get(i); + + if (random.nextBoolean()) { + yVec.add(y - lastLeft); + lastLeft = y; + } else { + yVec.add(lastRight - y); + lastRight = y; + } + } + + yVec.add(maxY - lastLeft); + yVec.add(lastRight - maxY); + + // Randomly pair up the X- and Y-components + Collections.shuffle(yVec, random); + + // Combine the paired up components into vectors + List vec = new ArrayList<>(n); + + for (int i = 0; i < n; i++) { + vec.add(new Point2D.Double(xVec.get(i), yVec.get(i))); + } + + // Sort the vectors by angle + Collections.sort(vec, Comparator.comparingDouble(v -> Math.atan2(v.getY(), v.getX()))); + + // Lay them end-to-end + double x = 0, y = 0; + double minPolygonX = 0; + double minPolygonY = 0; + List points = new ArrayList<>(n); + + for (int i = 0; i < n; i++) { + points.add(new Point2D.Double(x, y)); + + x += vec.get(i).getX(); + y += vec.get(i).getY(); + + minPolygonX = Math.min(minPolygonX, x); + minPolygonY = Math.min(minPolygonY, y); + } + + // Move the polygon to the original min and max coordinates + double xShift = minX - minPolygonX; + double yShift = minY - minPolygonY; + + for (int i = 0; i < n; i++) { + Point2D.Double p = points.get(i); + points.set(i, new Point2D.Double(p.x + xShift, p.y + yShift)); + } + + return points; + } + } + +} diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java new file mode 100644 index 0000000000000..918b9a6701490 --- /dev/null +++ 
b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.metrics; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.LeafReaderContext; +import org.opensearch.common.geo.GeoShapeDocValue; +import org.opensearch.common.util.BigArrays; +import org.opensearch.index.fielddata.GeoShapeValue; +import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.LeafBucketCollector; +import org.opensearch.search.aggregations.LeafBucketCollectorBase; +import org.opensearch.search.aggregations.support.ValuesSource; +import org.opensearch.search.aggregations.support.ValuesSourceConfig; +import org.opensearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Map; + +/** + * Aggregate all docs into a geographic bounds for field geo_shape. + * + * @opensearch.internal + */ +public final class GeoBoundsGeoShapeAggregator extends AbstractGeoBoundsAggregator { + private static final Logger LOGGER = LogManager.getLogger(GeoBoundsGeoShapeAggregator.class); + + public GeoBoundsGeoShapeAggregator( + String name, + SearchContext searchContext, + Aggregator aggregator, + ValuesSourceConfig valuesSourceConfig, + boolean wrapLongitude, + Map metaData + ) throws IOException { + super(name, searchContext, aggregator, valuesSourceConfig, wrapLongitude, metaData); + } + + @Override + protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector leafBucketCollector) { + if (valuesSource == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + final BigArrays bigArrays = context.bigArrays(); + final GeoShapeValue values = valuesSource.getGeoShapeValues(ctx); + return new LeafBucketCollectorBase(leafBucketCollector, values) { + @Override + public void collect(int doc, long bucket) throws IOException { + setBucketSize(bucket, bigArrays); + if (values.advanceExact(doc)) { + final GeoShapeDocValue value = values.nextValue(); + final GeoShapeDocValue.BoundingRectangle boundingBox = value.getBoundingRectangle(); + if (boundingBox != null) { + double top = tops.get(bucket); + if (boundingBox.getMaxLatitude() > top) { + top = boundingBox.getMaxLatitude(); + } + + double bottom = bottoms.get(bucket); + if (boundingBox.getMinLatitude() < bottom) { + bottom = boundingBox.getMinLatitude(); + } + + double posLeft = posLefts.get(bucket); + if (boundingBox.getMinLongitude() >= 0 && boundingBox.getMinLongitude() < posLeft) { + posLeft = boundingBox.getMinLongitude(); + } + if (boundingBox.getMaxLongitude() >= 0 && boundingBox.getMaxLongitude() < posLeft) { + posLeft = boundingBox.getMaxLongitude(); + } + + double posRight = posRights.get(bucket); + if (boundingBox.getMaxLongitude() >= 0 && boundingBox.getMaxLongitude() > posRight) { + posRight = boundingBox.getMaxLongitude(); + } + if (boundingBox.getMinLongitude() >= 0 && boundingBox.getMinLongitude() > posRight) { + posRight = boundingBox.getMinLongitude(); + } + + double negLeft = negLefts.get(bucket); + if (boundingBox.getMinLongitude() < 0 && boundingBox.getMinLongitude() < negLeft) { + negLeft = boundingBox.getMinLongitude(); + } + if (boundingBox.getMaxLongitude() < 0 && 
boundingBox.getMaxLongitude() < negLeft) { + negLeft = boundingBox.getMaxLongitude(); + } + + double negRight = negRights.get(bucket); + if (boundingBox.getMaxLongitude() < 0 && boundingBox.getMaxLongitude() > negRight) { + negRight = boundingBox.getMaxLongitude(); + } + if (boundingBox.getMinLongitude() < 0 && boundingBox.getMinLongitude() > negRight) { + negRight = boundingBox.getMinLongitude(); + } + + tops.set(bucket, top); + bottoms.set(bucket, bottom); + posLefts.set(bucket, posLeft); + posRights.set(bucket, posRight); + negLefts.set(bucket, negLeft); + negRights.set(bucket, negRight); + } else { + LOGGER.error("The bounding box was null for the Doc id {}", doc); + } + } + } + }; + } +} diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java new file mode 100644 index 0000000000000..68d9434631364 --- /dev/null +++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java @@ -0,0 +1,237 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.metrics; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LatLonShape; +import org.apache.lucene.document.ShapeDocValuesField; +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.hamcrest.MatcherAssert; +import org.junit.Assert; +import org.opensearch.common.geo.GeoBoundingBox; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.common.geo.GeoShapeUtils; +import org.opensearch.geo.GeoModulePlugin; +import org.opensearch.geo.tests.common.AggregationInspectionHelper; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Circle; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.ShapeType; +import org.opensearch.index.mapper.GeoShapeFieldMapper; +import org.opensearch.index.mapper.GeoShapeIndexer; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.plugins.SearchPlugin; +import org.opensearch.search.aggregations.AggregatorTestCase; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Random; + +import static org.hamcrest.Matchers.closeTo; + +public class GeoBoundsGeoShapeAggregatorTests extends AggregatorTestCase { + private static final Logger LOG = LogManager.getLogger(GeoBoundsGeoShapeAggregatorTests.class); + private static final double GEOHASH_TOLERANCE = 1E-5D; + private static final String AGGREGATION_NAME = "my_agg"; + private static final String FIELD_NAME = "field"; + + /** + * Overriding the Search Plugins list with {@link GeoModulePlugin} so that the testcase will know that this plugin is + * to 
be loaded during the tests. + * + * @return List of {@link SearchPlugin} + */ + @Override + protected List getSearchPlugins() { + return Collections.singletonList(new GeoModulePlugin()); + } + + /** + * Testing Empty aggregator results. + * + * @throws Exception + */ + public void testEmpty() throws Exception { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder(AGGREGATION_NAME).field(FIELD_NAME) + .wrapLongitude(false); + + final MappedFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(FIELD_NAME); + try (IndexReader reader = w.getReader()) { + IndexSearcher searcher = new IndexSearcher(reader); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + assertTrue(Double.isInfinite(bounds.top)); + assertTrue(Double.isInfinite(bounds.bottom)); + assertTrue(Double.isInfinite(bounds.posLeft)); + assertTrue(Double.isInfinite(bounds.posRight)); + assertTrue(Double.isInfinite(bounds.negLeft)); + assertTrue(Double.isInfinite(bounds.negRight)); + assertFalse(AggregationInspectionHelper.hasValue(bounds)); + } + } + } + + /** + * Testing GeoBoundAggregator for random shapes which are indexed. + * + * @throws Exception + */ + public void testRandom() throws Exception { + final int numDocs = randomIntBetween(50, 100); + final List Y = new ArrayList<>(); + final List X = new ArrayList<>(); + final Random random = random(); + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random, dir)) { + for (int i = 0; i < numDocs; i++) { + final Document document = new Document(); + final Geometry geometry = randomLuceneGeometry(random); + LOG.debug("Random Geometry created for Indexing : {}", geometry); + document.add(createShapeDocValue(geometry)); + w.addDocument(document); + getAllXAndYPoints(geometry, X, Y); + } + final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder(AGGREGATION_NAME).field(FIELD_NAME) + .wrapLongitude(false); + final MappedFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(FIELD_NAME); + try (IndexReader reader = w.getReader()) { + final IndexSearcher searcher = new IndexSearcher(reader); + final InternalGeoBounds actualBounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + final GeoBoundingBox expectedGeoBounds = getExpectedGeoBounds(X, Y); + MatcherAssert.assertThat( + actualBounds.bottomRight().getLat(), + closeTo(expectedGeoBounds.bottomRight().getLat(), GEOHASH_TOLERANCE) + ); + MatcherAssert.assertThat( + actualBounds.bottomRight().getLon(), + closeTo(expectedGeoBounds.bottomRight().getLon(), GEOHASH_TOLERANCE) + ); + MatcherAssert.assertThat(actualBounds.topLeft().getLat(), closeTo(expectedGeoBounds.topLeft().getLat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(actualBounds.topLeft().getLon(), closeTo(expectedGeoBounds.topLeft().getLon(), GEOHASH_TOLERANCE)); + assertTrue(AggregationInspectionHelper.hasValue(actualBounds)); + } + } + } + + private GeoBoundingBox getExpectedGeoBounds(final List X, final List Y) { + double top = Double.NEGATIVE_INFINITY; + double bottom = Double.POSITIVE_INFINITY; + double posLeft = Double.POSITIVE_INFINITY; + double posRight = Double.NEGATIVE_INFINITY; + double negLeft = Double.POSITIVE_INFINITY; + double negRight = Double.NEGATIVE_INFINITY; + // Finding the bounding box for the shapes. 
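Both the aggregator's collect method earlier in this diff and the expected-bounds loop that follows keep separate extremes for positive and negative longitudes, so that the reduce step can later decide whether wrapping across the antimeridian yields a tighter box. A standalone sketch of that bookkeeping, with hypothetical class and method names:

```java
import java.util.List;

public final class GeoBoundsSketch {
    /** Top/bottom latitude plus separate longitude extremes for each hemisphere. */
    public static final class Bounds {
        double top = Double.NEGATIVE_INFINITY;
        double bottom = Double.POSITIVE_INFINITY;
        double posLeft = Double.POSITIVE_INFINITY;
        double posRight = Double.NEGATIVE_INFINITY;
        double negLeft = Double.POSITIVE_INFINITY;
        double negRight = Double.NEGATIVE_INFINITY;
    }

    /** Folds (lon, lat) pairs into a Bounds, tracking the two longitude hemispheres separately. */
    public static Bounds compute(final List<double[]> lonLatPoints) {
        final Bounds b = new Bounds();
        for (final double[] point : lonLatPoints) {
            final double lon = point[0];
            final double lat = point[1];
            b.top = Math.max(b.top, lat);
            b.bottom = Math.min(b.bottom, lat);
            if (lon >= 0) {
                // Eastern hemisphere keeps its own left/right edges.
                b.posLeft = Math.min(b.posLeft, lon);
                b.posRight = Math.max(b.posRight, lon);
            } else {
                // Western hemisphere is tracked separately for the wrap-longitude decision.
                b.negLeft = Math.min(b.negLeft, lon);
                b.negRight = Math.max(b.negRight, lon);
            }
        }
        return b;
    }
}
```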
+ for (final Double lon : X) { + if (lon >= 0 && lon < posLeft) { + posLeft = lon; + } + if (lon >= 0 && lon > posRight) { + posRight = lon; + } + if (lon < 0 && lon < negLeft) { + negLeft = lon; + } + if (lon < 0 && lon > negRight) { + negRight = lon; + } + } + for (final Double lat : Y) { + if (lat > top) { + top = lat; + } + if (lat < bottom) { + bottom = lat; + } + } + if (Double.isInfinite(posLeft)) { + return new GeoBoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, negRight)); + } else if (Double.isInfinite(negLeft)) { + return new GeoBoundingBox(new GeoPoint(top, posLeft), new GeoPoint(bottom, posRight)); + } else { + return new GeoBoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, posRight)); + } + } + + private void getAllXAndYPoints(final Geometry geometry, final List X, final List Y) { + if (geometry instanceof Point) { + final Point point = (Point) geometry; + X.add(point.getX()); + Y.add(point.getY()); + return; + } else if (geometry instanceof Polygon) { + final Polygon polygon = (Polygon) geometry; + for (int i = 0; i < polygon.getPolygon().getX().length; i++) { + X.add(polygon.getPolygon().getX(i)); + Y.add(polygon.getPolygon().getY(i)); + } + return; + } else if (geometry instanceof Line) { + final Line line = (Line) geometry; + for (int i = 0; i < line.getX().length; i++) { + X.add(line.getX(i)); + Y.add(line.getY(i)); + } + return; + } + Assert.fail( + String.format(Locale.ROOT, "Error cannot convert the %s to a valid indexable format[POINT, POLYGON, LINE]", geometry.getClass()) + ); + } + + private ShapeDocValuesField createShapeDocValue(final Geometry geometry) { + if (geometry instanceof Point) { + final Point point = (Point) geometry; + return LatLonShape.createDocValueField(FIELD_NAME, point.getLat(), point.getLon()); + } else if (geometry instanceof Polygon) { + return LatLonShape.createDocValueField(FIELD_NAME, GeoShapeUtils.toLucenePolygon((Polygon) geometry)); + } else if (geometry instanceof Line) { + return LatLonShape.createDocValueField(FIELD_NAME, GeoShapeUtils.toLuceneLine((Line) geometry)); + } + Assert.fail( + String.format(Locale.ROOT, "Error cannot convert the %s to a valid indexable format[POINT, POLYGON, LINE]", geometry.getClass()) + ); + return null; + } + + /** + * Random function to generate a {@link LatLonGeometry}. Now for indexing of GeoShape field, we index all the + * different Geometry shapes that we support({@link ShapeType}) in OpenSearch are broken down into 3 shapes only. + * Hence, we are generating only 3 shapes : {@link org.apache.lucene.geo.Point}, + * {@link org.apache.lucene.geo.Line}, {@link org.apache.lucene.geo.Polygon}. {@link Circle} is not supported. 
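`createShapeDocValue` above maps each supported OpenSearch geometry to the Lucene doc-value field that backs the geo_shape GeoBounds aggregation. A small sketch of indexing a single point both as a queryable shape and as that doc value (field name and coordinates are illustrative):

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LatLonShape;
import org.apache.lucene.document.ShapeDocValuesField;

public class ShapeDocValueSketch {
    public static void main(String[] args) {
        final Document document = new Document();
        final double lat = 48.8566;
        final double lon = 2.3522;
        // Index the point so shape queries can match it ...
        for (Field field : LatLonShape.createIndexableFields("field", lat, lon)) {
            document.add(field);
        }
        // ... and store the doc value that the GeoBounds aggregator reads at collect time.
        final ShapeDocValuesField docValue = LatLonShape.createDocValueField("field", lat, lon);
        document.add(docValue);
        System.out.println(document.getFields().size() + " fields added for the point");
    }
}
```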
+ * Check {@link GeoShapeIndexer#prepareForIndexing(org.opensearch.geometry.Geometry)} + * + * @return {@link LatLonGeometry} + */ + private static Geometry randomLuceneGeometry(final Random r) { + int shapeNumber = OpenSearchTestCase.randomIntBetween(0, 2); + if (shapeNumber == 0) { + // Point + return RandomGeoGeometryGenerator.randomPoint(r); + } else if (shapeNumber == 1) { + // LineString + return RandomGeoGeometryGenerator.randomLine(r); + } else { + // Polygon + return RandomGeoGeometryGenerator.randomPolygon(r); + } + } + +} diff --git a/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java b/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java new file mode 100644 index 0000000000000..caf15507e08c5 --- /dev/null +++ b/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java @@ -0,0 +1,240 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.tests.common; + +import org.junit.Assert; +import org.opensearch.geo.algorithm.PolygonGenerator; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.LinearRing; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; +import org.opensearch.index.mapper.GeoShapeIndexer; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Random; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +/** + * Random geo generation utilities for randomized geo_shape type testing. + */ +public class RandomGeoGeometryGenerator { + // Just picking a number 10 to be the max edges of a polygon. Don't want to make too large which can impact + // debugging. + private static final int MAX_VERTEXES = 10; + private static final int MAX_MULTIPLE_GEOMETRIES = 10; + + private static final Predicate NOT_SUPPORTED_SHAPES = shapeType -> shapeType != ShapeType.CIRCLE + && shapeType != ShapeType.LINEARRING; + + /** + * Creating list of only supported geometries defined here: {@link GeoShapeIndexer#prepareForIndexing(Geometry)} + */ + private static final List SUPPORTED_SHAPE_TYPES = Arrays.stream(ShapeType.values()) + .filter(NOT_SUPPORTED_SHAPES) + .collect(Collectors.toList()); + + /** + * Returns a random Geometry. It makes sure that only that geometry is returned which is supported by OpenSearch + * while indexing. 
Check {@link GeoShapeIndexer#prepareForIndexing(Geometry)} + * + * @return {@link Geometry} + */ + public static Geometry randomGeometry(final Random r) { + final ShapeType randomShapeType = SUPPORTED_SHAPE_TYPES.get( + OpenSearchTestCase.randomIntBetween(0, SUPPORTED_SHAPE_TYPES.size() - 1) + ); + switch (randomShapeType) { + case POINT: + return randomPoint(r); + case MULTIPOINT: + return randomMultiPoint(r); + case POLYGON: + return randomPolygon(r); + case LINESTRING: + return randomLine(r); + case MULTIPOLYGON: + return randomMultiPolygon(r); + case GEOMETRYCOLLECTION: + return randomGeometryCollection(r); + case MULTILINESTRING: + return randomMultiLine(r); + case ENVELOPE: + return randomRectangle(r); + default: + Assert.fail(String.format(Locale.ROOT, "Cannot create a geometry of type %s ", randomShapeType)); + } + return null; + } + + /** + * Generate a random point on the Earth Surface. + * + * @param r {@link Random} + * @return {@link Point} + */ + public static Point randomPoint(final Random r) { + double[] pt = getLonAndLatitude(r); + return new Point(pt[0], pt[1]); + } + + /** + * Generate a random polygon on earth surface. + * + * @param r {@link Random} + * @return {@link Polygon} + */ + public static Polygon randomPolygon(final Random r) { + final int vertexCount = OpenSearchTestCase.randomIntBetween(3, MAX_VERTEXES); + return randomPolygonWithFixedVertexCount(r, vertexCount); + } + + /** + * Generate a random line on the earth Surface. + * + * @param r {@link Random} + * @return {@link Line} + */ + public static Line randomLine(final Random r) { + final double[] pt1 = getLonAndLatitude(r); + final double[] pt2 = getLonAndLatitude(r); + final double[] x = { pt1[0], pt2[0] }; + final double[] y = { pt1[1], pt2[1] }; + return new Line(x, y); + } + + /** + * Returns an object of {@link MultiPoint} denoting a list of points on earth surface. + * @param r {@link Random} + * @return {@link MultiPoint} + */ + public static MultiPoint randomMultiPoint(final Random r) { + int multiplePoints = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES); + final List pointsList = new ArrayList<>(); + IntStream.range(0, multiplePoints).forEach(i -> pointsList.add(randomPoint(r))); + return new MultiPoint(pointsList); + } + + /** + * Returns an object of {@link MultiPolygon} denoting various polygons on earth surface. + * + * @param r {@link Random} + * @return {@link MultiPolygon} + */ + public static MultiPolygon randomMultiPolygon(final Random r) { + int multiplePolygons = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES); + final List polygonList = new ArrayList<>(); + IntStream.range(0, multiplePolygons).forEach(i -> polygonList.add(randomPolygon(r))); + return new MultiPolygon(polygonList); + } + + /** + * Returns an object of {@link GeometryCollection} having various shapes on earth surface. + * + * @param r {@link Random} + * @return {@link GeometryCollection} + */ + public static GeometryCollection randomGeometryCollection(final Random r) { + final List geometries = new ArrayList<>(); + geometries.addAll(randomMultiPoint(r).getAll()); + geometries.addAll(randomMultiPolygon(r).getAll()); + geometries.addAll(randomMultiLine(r).getAll()); + geometries.add(randomPoint(r)); + geometries.add(randomLine(r)); + geometries.add(randomPolygon(r)); + geometries.add(randomRectangle(r)); + return new GeometryCollection<>(geometries); + } + + /** + * Returns a {@link MultiLine} object containing multiple lines on earth surface. 
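A hedged sketch of exercising the generator methods above from a test that extends OpenSearchTestCase, drawing a few reproducible random geometries from the framework's seeded Random; the test class and assertions are illustrative, not part of the module:

```java
import java.util.Random;

import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator;
import org.opensearch.geometry.Geometry;
import org.opensearch.test.OpenSearchTestCase;

public class RandomGeometryUsageSketchTests extends OpenSearchTestCase {
    public void testGeneratorProducesSupportedShapes() {
        // random() comes from the test framework, so the shapes are reproducible per test seed.
        final Random random = random();
        for (int i = 0; i < 5; i++) {
            // Each call may return any supported shape: point, line, polygon, multi-*, collection or envelope.
            final Geometry geometry = RandomGeoGeometryGenerator.randomGeometry(random);
            assertNotNull(geometry);
            logger.debug("generated geometry {}: {}", i, geometry);
        }
    }
}
```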
+ * + * @param r {@link Random} + * @return {@link MultiLine} + */ + public static MultiLine randomMultiLine(Random r) { + int multiLines = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES); + final List linesList = new ArrayList<>(); + IntStream.range(0, multiLines).forEach(i -> linesList.add(randomLine(r))); + return new MultiLine(linesList); + } + + /** + * Returns a random {@link Rectangle} created on earth surface. + * + * @param r {@link Random} + * @return {@link Rectangle} + */ + public static Rectangle randomRectangle(final Random r) { + final Polygon polygon = randomPolygonWithFixedVertexCount(r, 4); + double minX = Double.POSITIVE_INFINITY, maxX = Double.NEGATIVE_INFINITY, maxY = Double.NEGATIVE_INFINITY, minY = + Double.POSITIVE_INFINITY; + for (int i = 0; i < polygon.getPolygon().length(); i++) { + double x = polygon.getPolygon().getX()[i]; + double y = polygon.getPolygon().getY()[i]; + + minX = Math.min(minX, x); + minY = Math.min(minY, y); + maxX = Math.max(maxX, x); + maxY = Math.max(maxY, y); + } + return new Rectangle(minX, maxX, maxY, minY); + } + + /** + * Returns a double array where pt[0] : longitude and pt[1] : latitude + * + * @param r {@link Random} + * @return double[] + */ + private static double[] getLonAndLatitude(final Random r) { + double[] pt = new double[2]; + RandomGeoGenerator.randomPoint(r, pt); + return pt; + } + + private static Polygon randomPolygonWithFixedVertexCount(final Random r, final int vertexCount) { + final List xPool = new ArrayList<>(vertexCount); + final List yPool = new ArrayList<>(vertexCount); + IntStream.range(0, vertexCount).forEach(iterator -> { + double[] pt = getLonAndLatitude(r); + xPool.add(pt[0]); + yPool.add(pt[1]); + }); + final List pointsList = PolygonGenerator.generatePolygon(xPool, yPool, r); + // Checking the list + assert vertexCount == pointsList.get(0).length; + assert vertexCount == pointsList.get(1).length; + // Create the linearRing, as we need to close the polygon hence increasing vertexes count by 1 + final double[] x = new double[vertexCount + 1]; + final double[] y = new double[vertexCount + 1]; + IntStream.range(0, vertexCount).forEach(iterator -> { + x[iterator] = pointsList.get(0)[iterator]; + y[iterator] = pointsList.get(1)[iterator]; + }); + // making sure to close the polygon + x[vertexCount] = x[0]; + y[vertexCount] = y[0]; + final LinearRing linearRing = new LinearRing(x, y); + return new Polygon(linearRing); + } + +} diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.13.4.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..2e9425b8ff6db --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-annotations-2.13.4.jar.sha1 @@ -0,0 +1 @@ +858c6cc78e1f08a885b1613e1d817c829df70a6e \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 deleted file mode 100644 index fd75028bd141f..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56deb9ea2c93a7a556b3afbedd616d342963464e \ 
No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.4.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..fcc6491d1f78d --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-databind-2.13.4.jar.sha1 @@ -0,0 +1 @@ +98b0edfa8e4084078f10b7b356c300ded4a71491 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..2e9425b8ff6db --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1 @@ -0,0 +1 @@ +858c6cc78e1f08a885b1613e1d817c829df70a6e \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 deleted file mode 100644 index fd75028bd141f..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56deb9ea2c93a7a556b3afbedd616d342963464e \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.4.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..fcc6491d1f78d --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-databind-2.13.4.jar.sha1 @@ -0,0 +1 @@ +98b0edfa8e4084078f10b7b356c300ded4a71491 \ No newline at end of file diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java index 8413a750e2741..cb2e28210faf1 100644 --- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java +++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java @@ -94,8 +94,13 @@ public class ExampleCustomSettingsConfig { private final List list; private final String filtered; + /** + * Instantiate this object based on the specified environment. + * + * @param environment The environment including paths to custom setting configuration files + */ public ExampleCustomSettingsConfig(final Environment environment) { - // Elasticsearch config directory + // OpenSearch config directory final Path configDir = environment.configDir(); // Resolve the plugin's custom settings file @@ -121,22 +126,47 @@ public ExampleCustomSettingsConfig(final Environment environment) { assert secured != null; } + /** + * Gets the value of the custom.simple String setting. + * + * @return the custom.simple value + */ public String getSimple() { return simple; } + /** + * Gets the value of the custom.bool boolean setting. + * + * @return the custom.bool value + */ public Boolean getBool() { return bool; } + /** + * Gets the value of the custom.validated String setting. 
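These getters expose values loaded from the plugin's custom settings file. For context, a hedged sketch of how such custom.* keys are typically declared and read with the OpenSearch Setting API; the keys mirror the example's settings, but the snippet is illustrative rather than the plugin's own registration code:

```java
import java.util.Collections;
import java.util.List;

import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;

public class CustomSettingsSketch {
    // Node-scoped settings mirroring the custom.* keys used by the example plugin.
    static final Setting<String> SIMPLE = Setting.simpleString("custom.simple", Property.NodeScope);
    static final Setting<Boolean> BOOL = Setting.boolSetting("custom.bool", false, Property.NodeScope);
    static final Setting<List<Integer>> LIST = Setting.listSetting(
        "custom.list",
        Collections.emptyList(),
        Integer::parseInt,
        Property.NodeScope
    );
    // Filtered settings are hidden from the settings APIs (illustrative; the example plugin decides its own properties).
    static final Setting<String> FILTERED = Setting.simpleString("custom.filtered", Property.NodeScope, Property.Filtered);

    public static void main(String[] args) {
        final Settings settings = Settings.builder()
            .put("custom.simple", "foo")
            .put("custom.bool", true)
            .putList("custom.list", "1", "2", "3")
            .build();
        System.out.println(SIMPLE.get(settings) + " " + BOOL.get(settings) + " " + LIST.get(settings));
    }
}
```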
+ * + * @return the custom.validated value + */ public String getValidated() { return validated; } + /** + * Gets the value of the custom.filtered String setting. + * + * @return the custom.filtered value + */ public String getFiltered() { return filtered; } + /** + * Gets the value of the custom.list list of integers setting. + * + * @return the custom.list value + */ public List getList() { return list; } diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java index aa22938c72a01..0b619102c667f 100644 --- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java +++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java @@ -42,10 +42,19 @@ import static java.util.stream.Collectors.toList; +/** + * An example plugin that includes custom settings. + */ public class ExampleCustomSettingsPlugin extends Plugin { private final ExampleCustomSettingsConfig config; + /** + * Instantiate this plugin with the specified settings and config path. + * + * @param settings The settings for this plugin. + * @param configPath The path to this plugin's configuration files. + */ public ExampleCustomSettingsPlugin(final Settings settings, final Path configPath) { this.config = new ExampleCustomSettingsConfig(new Environment(settings, configPath)); @@ -53,9 +62,6 @@ public ExampleCustomSettingsPlugin(final Settings settings, final Path configPat assert "secret".equals(config.getFiltered()); } - /** - * @return the plugin's custom settings - */ @Override public List> getSettings() { return Arrays.asList( diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java new file mode 100644 index 0000000000000..5af8654201da2 --- /dev/null +++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Example classes demonstrating the use of custom settings in a plugin. + */ +package org.opensearch.example.customsettings; diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java index 49098ae36e30f..c646592af63cb 100644 --- a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java +++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java @@ -44,6 +44,12 @@ * Plugin declaring a custom {@link SignificanceHeuristic}. */ public class CustomSignificanceHeuristicPlugin extends Plugin implements SearchPlugin { + + /** + * Instantiate this plugin. 
+ */ + public CustomSignificanceHeuristicPlugin() {}; + @Override public List> getSignificanceHeuristics() { return singletonList(new SignificanceHeuristicSpec<>(SimpleHeuristic.NAME, SimpleHeuristic::new, SimpleHeuristic.PARSER)); diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java index 8365a56bcfe4e..9458bf5b75feb 100644 --- a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java +++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java @@ -44,13 +44,25 @@ * A simple {@linkplain SignificanceHeuristic} used as an example of declaring a custom heuristic. */ public class SimpleHeuristic extends SignificanceHeuristic { + /** + * The name of this NamedWriteable heuristic. + */ public static final String NAME = "simple"; + + /** + * The parser with which to deserialize this object from XContent. + */ public static final ObjectParser PARSER = new ObjectParser<>(NAME, SimpleHeuristic::new); + /** + * Instantiates this object. + */ public SimpleHeuristic() {} /** * Read from a stream. + * + * @param in Input to read the value from */ public SimpleHeuristic(StreamInput in) throws IOException { // Nothing to read diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java new file mode 100644 index 0000000000000..20809857273c4 --- /dev/null +++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Example classes demonstrating the use of a custom significance heuristic. + */ +package org.opensearch.example.customsigheuristic; diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java index da154609e5f2f..05f26a8e401e1 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java +++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java @@ -41,8 +41,16 @@ import java.io.IOException; import java.util.Locale; +/** + * A custom suggester supporting suggestion-based search. + */ public class CustomSuggester extends Suggester { + /** + * Instantiate this object.
+ */ + public CustomSuggester() {} + // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123 @Override public Suggest.Suggestion> innerExecute( diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java index 5706b654ffbde..b71a90e700d21 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java +++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java @@ -38,7 +38,16 @@ import java.util.Collections; import java.util.List; +/** + * Plugin demonstrating custom suggestion-based search. + */ public class CustomSuggesterPlugin extends Plugin implements SearchPlugin { + + /** + * Instantiate this class. + */ + public CustomSuggesterPlugin() {} + @Override public List> getSuggesters() { return Collections.singletonList( diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java index 50ee700c3a253..f35fde03d261f 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java +++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java @@ -46,19 +46,43 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg; +/** + * The suggestion responses corresponding with the suggestions in the request. + */ public class CustomSuggestion extends Suggest.Suggestion { + /** + * An integer representing the type of the suggestion formerly used for internal serialization over the network. + * + * This class is now serialized as a NamedWriteable and this value only remains for backwards compatibility + */ public static final int TYPE = 999; + /** + * A meaningless value used to test that plugin suggesters can add fields to their Suggestion types. + */ public static final ParseField DUMMY = new ParseField("dummy"); private String dummy; + /** + * Instantiate this object with the specified name, size, and value for the configured field. + * + * @param name The name of the suggestion as is defined in the request. + * @param size The suggested term size specified in request, only used for merging shard responses. + * @param dummy The added custom suggestion type. + */ public CustomSuggestion(String name, int size, String dummy) { super(name, size); this.dummy = dummy; } + /** + * Instantiate this object from a stream. + * + * @param in Input to read the value from + * @throws IOException on failure to read the value. + */ public CustomSuggestion(StreamInput in) throws IOException { super(in); dummy = in.readString(); @@ -85,6 +109,8 @@ public int getWriteableType() { * * This can't be serialized to xcontent because Suggestions appear in xcontent as an array of entries, so there is no place * to add a custom field. But we can still use a custom field internally and use it to define a Suggestion's behavior + * + * @return the value. */ public String getDummy() { return dummy; @@ -95,12 +121,23 @@ protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); } + /** + * Instantiate a CustomSuggestion from XContent. 
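The StreamInput constructor above reads the extra dummy field off the wire; assuming the usual writeString/readString symmetry in writeTo, a minimal round-trip sketch with OpenSearch's stream classes (class name and value are illustrative):

```java
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;

public class StreamRoundTripSketch {
    public static void main(String[] args) throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            // Write the custom field the same way a Suggestion's writeTo would.
            out.writeString("my-dummy-value");
            try (StreamInput in = out.bytes().streamInput()) {
                // Read it back in the same order, as the StreamInput constructor does.
                final String dummy = in.readString();
                System.out.println(dummy); // prints: my-dummy-value
            }
        }
    }
}
```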
+ * + * @param parser The XContent parser to use + * @param name The name of the suggestion + * @return A new CustomSuggestion instance for the specified name. + * @throws IOException on deserialization error. + */ public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException { CustomSuggestion suggestion = new CustomSuggestion(name, -1, null); parseEntries(parser, suggestion, Entry::fromXContent); return suggestion; } + /** + * Represents a part from the suggest text with suggested options. + */ public static class Entry extends Suggest.Suggestion.Entry { private static final ObjectParser PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new); @@ -117,13 +154,30 @@ public static class Entry extends Suggest.Suggestion.Entry otherEntry) { @@ -150,6 +206,8 @@ protected void merge(Suggest.Suggestion.Entry