diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index e702c97248cdc..944230377d077 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index d013780b1fd0a..5ac361c810627 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.4 + - label: "{{matrix.image}} / 8.13.5 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.5 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.4 + BWC_VERSION: 8.13.5 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index fda4315926b6b..207a332ed6717 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -88,6 +88,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 GRADLE_TASK: - checkPart1 - checkPart2 @@ -113,6 +114,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 BWC_VERSION: $BWC_LIST agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 7e9f6872b9146..7ba46f0f0951c 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -591,8 +591,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.13.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.4#bwcTest + - label: 8.13.5 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.5#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -601,7 +601,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.13.4 + BWC_VERSION: 8.13.5 retry: automatic: - exit_status: "-1" @@ -714,7 +714,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -735,6 +735,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 GRADLE_TASK: - checkPart1 - checkPart2 @@ -760,7 +761,8 @@ steps: - openjdk17 - openjdk21 - openjdk22 - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + - openjdk23 + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 77e51005d5ace..b9afdcf23b858 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,6 +30,6 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.4" + - "8.13.5" - "8.14.0" - "8.15.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 49f3708ce4af9..213e4e93bc81d 100644 --- 
a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - "7.17.22" - - "8.13.4" + - "8.13.5" - "8.14.0" - "8.15.0" diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index a6a46bb41f112..edd705b66caaa 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -59,3 +59,7 @@ unknown-field License # don't build them ourselves and the license precludes us modifying them # to fix this. library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so + +# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so +# which has no dependencies (not even libc) besides the symbols in the base executable. +shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so diff --git a/docs/build.gradle b/docs/build.gradle index 0eba980e8cc31..7ca4820eea1af 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -1752,6 +1752,7 @@ setups['setup-snapshots'] = setups['setup-repository'] + ''' name: "my_admin_role" body: > { + "description": "Grants full access to all management features within the cluster.", "cluster": ["all"], "indices": [ {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} diff --git a/docs/changelog/106486.yaml b/docs/changelog/106486.yaml new file mode 100644 index 0000000000000..b33df50780e02 --- /dev/null +++ b/docs/changelog/106486.yaml @@ -0,0 +1,17 @@ +pr: 106486 +summary: Create custom parser for ISO-8601 datetimes +area: Infra/Core +type: enhancement +issues: + - 102063 +highlight: + title: New custom parser for ISO-8601 datetimes + body: |- + This introduces a new custom parser for ISO-8601 datetimes, for the `iso8601`, `strict_date_optional_time`, and + `strict_date_optional_time_nanos` built-in date formats. This provides a performance improvement over the + default Java date-time parsing. Whilst it maintains much of the same behaviour, + the new parser does not accept nonsensical date-time strings that have multiple fractional seconds fields + or multiple timezone specifiers. If the new parser fails to parse a string, it will then use the previous parser + to parse it. If a large proportion of the input data consists of these invalid strings, this may cause + a small performance degradation. If you wish to force the use of the old parsers regardless, + set the JVM property `es.datetime.java_time_parsers=true` on all ES nodes. 
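The fallback behaviour described in this changelog entry can be sketched in plain Java. This is an illustrative sketch only, not the actual Elasticsearch implementation; `parseWithCustomParser` is a hypothetical stand-in for the new hand-written ISO-8601 parser:

```java
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;

// Illustrative sketch of the documented fallback: try the fast custom ISO-8601
// parser first; only strings it rejects fall through to the java.time parser.
final class Iso8601WithFallback {

    static TemporalAccessor parse(String input) {
        TemporalAccessor result = parseWithCustomParser(input); // hypothetical fast path
        if (result != null) {
            return result;
        }
        // Rare inputs (e.g. strings the custom parser considers invalid) land here
        // and pay for a second parse, hence the possible small degradation noted above.
        return DateTimeFormatter.ISO_DATE_TIME.parse(input);
    }

    // Stand-in for the new hand-written parser; returns null instead of throwing.
    private static TemporalAccessor parseWithCustomParser(String input) {
        try {
            return Instant.parse(input);
        } catch (DateTimeParseException e) {
            return null;
        }
    }
}
```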
diff --git a/docs/changelog/107876.yaml b/docs/changelog/107876.yaml new file mode 100644 index 0000000000000..21624cacf7e1d --- /dev/null +++ b/docs/changelog/107876.yaml @@ -0,0 +1,5 @@ +pr: 107876 +summary: "ESQL: Add aggregates node level reduction" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/107886.yaml b/docs/changelog/107886.yaml new file mode 100644 index 0000000000000..a328bc2a2a208 --- /dev/null +++ b/docs/changelog/107886.yaml @@ -0,0 +1,5 @@ +pr: 107886 +summary: Cluster state role mapper file settings service +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/108088.yaml b/docs/changelog/108088.yaml new file mode 100644 index 0000000000000..95c58f6dc19f1 --- /dev/null +++ b/docs/changelog/108088.yaml @@ -0,0 +1,5 @@ +pr: 108088 +summary: Add a SIMD (AVX2) optimised vector distance function for int7 on x64 +area: "Search" +type: enhancement +issues: [] diff --git a/docs/changelog/108238.yaml b/docs/changelog/108238.yaml new file mode 100644 index 0000000000000..607979c2eb0ac --- /dev/null +++ b/docs/changelog/108238.yaml @@ -0,0 +1,6 @@ +pr: 108238 +summary: "Nativeaccess: try to load all located libsystemds" +area: Infra/Core +type: bug +issues: + - 107878 diff --git a/docs/changelog/108276.yaml b/docs/changelog/108276.yaml deleted file mode 100644 index aaa78073f544e..0000000000000 --- a/docs/changelog/108276.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108276 -summary: Fix tsdb codec when doc-values spread in two blocks -area: TSDB -type: bug -issues: [] diff --git a/docs/changelog/108280.yaml b/docs/changelog/108280.yaml deleted file mode 100644 index b36a2f3769124..0000000000000 --- a/docs/changelog/108280.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108280 -summary: Ensure necessary security context for s3 bulk deletions -area: Snapshot/Restore -type: bug -issues: - - 108049 diff --git a/docs/changelog/108283.yaml b/docs/changelog/108283.yaml deleted file mode 100644 index 6341a8775b729..0000000000000 --- a/docs/changelog/108283.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108283 -summary: Fix `BlockHash` `DirectEncoder` -area: ES|QL -type: bug -issues: - - 108268 diff --git a/docs/changelog/108333.yaml b/docs/changelog/108333.yaml new file mode 100644 index 0000000000000..c3152500ce1b2 --- /dev/null +++ b/docs/changelog/108333.yaml @@ -0,0 +1,5 @@ +pr: 108333 +summary: Allow `read_slm` to call GET /_slm/status +area: ILM+SLM +type: bug +issues: [] diff --git a/docs/changelog/108394.yaml b/docs/changelog/108394.yaml new file mode 100644 index 0000000000000..58f48fa548c6e --- /dev/null +++ b/docs/changelog/108394.yaml @@ -0,0 +1,6 @@ +pr: 108394 +summary: Handle `IndexNotFoundException` +area: Transform +type: bug +issues: + - 107263 diff --git a/docs/changelog/108410.yaml b/docs/changelog/108410.yaml new file mode 100644 index 0000000000000..5fd831231a3be --- /dev/null +++ b/docs/changelog/108410.yaml @@ -0,0 +1,5 @@ +pr: 108410 +summary: GeoIP tasks should wait longer for master +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/108429.yaml b/docs/changelog/108429.yaml new file mode 100644 index 0000000000000..562454a0de256 --- /dev/null +++ b/docs/changelog/108429.yaml @@ -0,0 +1,6 @@ +pr: 108429 +summary: Fix `ClassCastException` in Significant Terms +area: Aggregations +type: bug +issues: + - 108427 diff --git a/docs/changelog/108431.yaml b/docs/changelog/108431.yaml new file mode 100644 index 0000000000000..84607b1b99ac3 --- /dev/null +++ b/docs/changelog/108431.yaml @@ -0,0 +1,5 @@ +pr: 108431 +summary: 
"ESQL: Disable quoting in FROM command" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/108444.yaml b/docs/changelog/108444.yaml new file mode 100644 index 0000000000000..c946ab24f939a --- /dev/null +++ b/docs/changelog/108444.yaml @@ -0,0 +1,5 @@ +pr: 108444 +summary: "Apm-data: ignore malformed fields, and too many dynamic fields" +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/108452.yaml b/docs/changelog/108452.yaml new file mode 100644 index 0000000000000..fdf531602c806 --- /dev/null +++ b/docs/changelog/108452.yaml @@ -0,0 +1,5 @@ +pr: 108452 +summary: Add the rerank task to the Elasticsearch internal inference service +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/108459.yaml b/docs/changelog/108459.yaml new file mode 100644 index 0000000000000..5e05797f284be --- /dev/null +++ b/docs/changelog/108459.yaml @@ -0,0 +1,6 @@ +pr: 108459 +summary: Do not use global ordinals strategy if the leaf reader context cannot be + obtained +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/108517.yaml b/docs/changelog/108517.yaml new file mode 100644 index 0000000000000..359c8302fdf6c --- /dev/null +++ b/docs/changelog/108517.yaml @@ -0,0 +1,6 @@ +pr: 108517 +summary: Forward `indexServiceSafe` exception to listener +area: Transform +type: bug +issues: + - 108418 diff --git a/docs/changelog/108518.yaml b/docs/changelog/108518.yaml new file mode 100644 index 0000000000000..aad823ccc89f6 --- /dev/null +++ b/docs/changelog/108518.yaml @@ -0,0 +1,5 @@ +pr: 108518 +summary: Remove leading is_ prefix from Enterprise geoip docs +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/108521.yaml b/docs/changelog/108521.yaml new file mode 100644 index 0000000000000..adc7c11a4decd --- /dev/null +++ b/docs/changelog/108521.yaml @@ -0,0 +1,6 @@ +pr: 108521 +summary: Adding override for lintian false positive on `libvec.so` +area: "Packaging" +type: bug +issues: + - 108514 diff --git a/docs/changelog/108522.yaml b/docs/changelog/108522.yaml new file mode 100644 index 0000000000000..5bc064d7995e9 --- /dev/null +++ b/docs/changelog/108522.yaml @@ -0,0 +1,5 @@ +pr: 108522 +summary: Ensure we return non-negative scores when scoring scalar dot-products +area: Vector Search +type: bug +issues: [] diff --git a/docs/changelog/108562.yaml b/docs/changelog/108562.yaml new file mode 100644 index 0000000000000..2a0047fe807fd --- /dev/null +++ b/docs/changelog/108562.yaml @@ -0,0 +1,6 @@ +pr: 108562 +summary: Add `internalClusterTest` for and fix leak in `ExpandSearchPhase` +area: Search +type: bug +issues: + - 108369 diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index b8fb92b1ea15d..732e2e7be46fa 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -1,6 +1,14 @@ -# Distributed Area Team Internals +# Distributed Area Internals -(Summary, brief discussion of our features) +The Distributed Area contains indexing and coordination systems. + +The index path stretches from the user REST command through shard routing down to each individual shard's translog and storage +engine. Reindexing is effectively reading from a source index and writing to a destination index (perhaps on different nodes). +The coordination side includes cluster coordination, shard allocation, cluster autoscaling stats, task management, and cross +cluster replication. 
Less obvious coordination systems include networking, the discovery plugin system, the snapshot/restore +logic, and shard recovery. + +A guide to the general Elasticsearch components can be found [here](https://github.com/elastic/elasticsearch/blob/main/docs/internal/GeneralArchitectureGuide.md). # Networking @@ -10,70 +18,7 @@ ### ActionListener -Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which -doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. -They support several useful control flows: - -- They can be completed immediately on the calling thread. -- They can be completed concurrently on a different thread. -- They can be stored in a data structure and completed later on when the system reaches a particular state. -- Most commonly, they can be passed on to other methods that themselves require a callback. -- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run - before or after completion, before passing them on. - -`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is -used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose -parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. It also makes -it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is -certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with `ActionListener` instances -themselves, creating new instances out of existing ones and completing them in interesting ways. See for instance: - -- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself -- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere -- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel -- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows - -Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code -without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to -waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our -code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The -entry points for all Elasticsearch APIs are callback-based (e.g. 
REST APIs all start at -[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), -and transport APIs all start at -[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) -and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via -callbacks. - -`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the -sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function -which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual -style of calling methods that return values directly back to the caller so they can continue executing as normal. There's essentially two -ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an -`onResponse()` and an `onFailure()` method. - -CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also -enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, -perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before -proceeding (e.g. -[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) -amongst many others). Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the -programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all -the callbacks ourselves. - -Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that -asynchronous methods must have `void` return type and may not throw any exceptions. This is mostly the case in our code as written today, -and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In -particular, it's not uncommon to permit some methods to throw an exception, using things like -[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) -(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an -`ActionListener` parameter, but still return a value separately for other local synchronous work. - -This pattern is often used in the transport action layer with the use of the -[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) -class, which wraps a `TransportChannel` produced by the transport layer. 
`TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a -call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring -caller timeouts. +See the [Javadocs for `ActionListener`](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/action/ActionListener.java) (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) @@ -133,6 +78,14 @@ are only used for internode operations/communications. ### Work Queues +### RestClient + +The `RestClient` is primarily used in testing, to send requests against cluster nodes in the same format as users would. There +are some uses of `RestClient`, via `RestClientBuilder`, in the production code. For example, remote reindex leverages the +`RestClient` internally as the REST client to the remote Elasticsearch cluster, taking advantage of the compatibility of +`RestClient` requests with much older Elasticsearch versions. The `RestClient` is also used externally by the `Java API Client` +to communicate with Elasticsearch. + # Cluster Coordination (Sketch of important classes? Might inform more sections to add for details.) @@ -292,9 +245,101 @@ works in parallel with the storage engine.) # Autoscaling -(Reactive and proactive autoscaling. Explain that we surface recommendations, how control plane uses it.) - -(Sketch / list the different deciders that we have, and then also how we use information from each to make a recommendation.) +The Autoscaling API in ES (Elasticsearch) uses cluster- and node-level statistics to provide a recommendation +for a cluster size to support the current cluster data and active workloads. ES Autoscaling is paired +with an ES Cloud service that periodically polls the ES elected master node for suggested cluster +changes. The cloud service will add more resources to the cluster based on Elasticsearch's recommendation. +Elasticsearch by itself cannot automatically scale. + +Autoscaling recommendations are tailored for the user [based on user defined policies][], composed of data +roles (hot, frozen, etc.) and [deciders][]. There's a public [webinar on autoscaling][], as well as the +public [Autoscaling APIs] docs. + +Autoscaling's current implementation is based primarily on storage requirements, as well as memory capacity +for ML and the frozen tier. It does not yet support scaling related to search load. Paired with ES Cloud, +autoscaling only scales upward, not downward, except for ML nodes that do get scaled up _and_ down. + +[based on user defined policies]: https://www.elastic.co/guide/en/elasticsearch/reference/current/xpack-autoscaling.html +[deciders]: https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-deciders.html +[webinar on autoscaling]: https://www.elastic.co/webinars/autoscaling-from-zero-to-production-seamlessly +[Autoscaling APIs]: https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-apis.html + +### Plugin REST and TransportAction entrypoints + +Autoscaling is a [plugin][]. All the REST APIs can be found in [autoscaling/rest/][]. +`GetAutoscalingCapacityAction` is the capacity calculation operation REST endpoint, as opposed to the +other REST commands that get/set/delete the policies guiding the capacity calculation.
The Transport +Actions can be found in [autoscaling/action/], where [TransportGetAutoscalingCapacityAction][] is the +entry point on the master node for calculating the optimal cluster resources based on the autoscaling +policies. + +[plugin]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java#L72 +[autoscaling/rest/]: https://github.com/elastic/elasticsearch/tree/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/rest +[autoscaling/action/]: https://github.com/elastic/elasticsearch/tree/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action +[TransportGetAutoscalingCapacityAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L82-L98 + +### How cluster capacity is determined + +[AutoscalingMetadata][] implements [Metadata.Custom][] in order to persist autoscaling policies. Each +Decider is an implementation of [AutoscalingDeciderService][]. The [AutoscalingCalculateCapacityService][] +is responsible for running the calculation. + +[TransportGetAutoscalingCapacityAction.computeCapacity] is the entry point to [AutoscalingCalculateCapacityService.calculate], +which creates an [AutoscalingDeciderResults][] for [each autoscaling policy][]. [AutoscalingDeciderResults.toXContent][] then +determines the [maximum required capacity][] to return to the caller. [AutoscalingCapacity][] is the base unit of a cluster +resources recommendation. + +The `TransportGetAutoscalingCapacityAction` response is cached to prevent concurrent callers from +overloading the system: the operation is expensive. `TransportGetAutoscalingCapacityAction` contains +a [CapacityResponseCache][]. `TransportGetAutoscalingCapacityAction.masterOperation` +calls [through the CapacityResponseCache][] into the `AutoscalingCalculateCapacityService` to handle +concurrent callers.
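The caching described above amounts to a single-flight pattern: while one expensive capacity calculation is in flight, concurrent callers wait on that same computation rather than starting their own. A minimal sketch of the idea follows; this is illustrative only, not the actual `CapacityResponseCache` code:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

// Minimal single-flight cache: concurrent callers share one in-flight
// computation rather than each triggering the expensive calculation.
final class SingleFlightCache<T> {
    private final Supplier<T> expensiveComputation;
    private final AtomicReference<CompletableFuture<T>> inFlight = new AtomicReference<>();

    SingleFlightCache(Supplier<T> expensiveComputation) {
        this.expensiveComputation = expensiveComputation;
    }

    CompletableFuture<T> get() {
        while (true) {
            CompletableFuture<T> existing = inFlight.get();
            if (existing != null) {
                return existing; // join the computation already in flight
            }
            CompletableFuture<T> fresh = new CompletableFuture<>();
            if (inFlight.compareAndSet(null, fresh)) {
                CompletableFuture.runAsync(() -> {
                    try {
                        fresh.complete(expensiveComputation.get());
                    } catch (Exception e) {
                        fresh.completeExceptionally(e);
                    } finally {
                        inFlight.set(null); // let the next caller recompute fresh data
                    }
                });
                return fresh;
            }
            // lost the race to another caller; loop and join its future
        }
    }
}
```

A production version would also need timeout and cancellation handling, which this sketch omits.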
+ +[AutoscalingMetadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java#L38 +[Metadata.Custom]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L141-L145 +[AutoscalingDeciderService]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderService.java#L16-L19 +[AutoscalingCalculateCapacityService]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L43 + +[TransportGetAutoscalingCapacityAction.computeCapacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L102-L108 +[AutoscalingCalculateCapacityService.calculate]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L108-L139 +[AutoscalingDeciderResults]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L34-L38 +[each autoscaling policy]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L124-L131 +[AutoscalingDeciderResults.toXContent]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L78 +[maximum required capacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L105-L116 +[AutoscalingCapacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCapacity.java#L27-L35 + +[CapacityResponseCache]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L44-L47 +[through the CapacityResponseCache]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L97 + +### Where the data comes from + +The Deciders each pull data from different sources as needed to inform their decisions. The +[DiskThresholdMonitor][] is one such data source. The Monitor runs on the master node and maintains +lists of nodes that exceed various disk size thresholds. [DiskThresholdSettings][] contains the +threshold settings with which the `DiskThresholdMonitor` runs. 
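As a rough illustration of the bookkeeping involved (not the actual `DiskThresholdMonitor` implementation, and with made-up names), a watermark check reduces to comparing each node's used-disk ratio against a configured threshold:

```java
import java.util.List;

// Rough illustration only: collect the nodes whose used-disk ratio exceeds a
// configured watermark, as a monitor running on the master might.
record NodeDiskUsage(String nodeId, long totalBytes, long usedBytes) {
    double usedRatio() {
        return (double) usedBytes / totalBytes;
    }
}

final class WatermarkCheck {
    static List<String> nodesOverWatermark(List<NodeDiskUsage> nodes, double watermark) {
        return nodes.stream()
            .filter(node -> node.usedRatio() >= watermark)
            .map(NodeDiskUsage::nodeId)
            .toList();
    }
}
```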
+ +[DiskThresholdMonitor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java#L53-L58 +[DiskThresholdSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java#L24-L27 + +### Deciders + +The `ReactiveStorageDeciderService` tracks information that demonstrates storage limitations are causing +problems in the cluster. It uses [an algorithm defined here][]. Some examples are: +- information from the `DiskThresholdMonitor` to find out whether nodes are exceeding their storage capacity +- number of unassigned shards that failed allocation because of insufficient storage +- the max shard size and minimum node size, and whether these can be satisfied with the existing infrastructure + +[an algorithm defined here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java#L158-L176 + +The `ProactiveStorageDeciderService` maintains a forecast window that [defaults to 30 minutes][]. It only +runs on data streams (ILM, rollover, etc.), not regular indexes. It looks at past [index changes][] that +took place within the forecast window to [predict][] resources that will be needed shortly. + +[defaults to 30 minutes]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L32 +[index changes]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L79-L83 +[predict]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L85-L95 + +There are several more Decider Services implementing the `AutoscalingDeciderService` interface. # Snapshot / Restore diff --git a/docs/internal/GeneralArchitectureGuide.md b/docs/internal/GeneralArchitectureGuide.md index f865277d07f8f..a2dadb70bf975 100644 --- a/docs/internal/GeneralArchitectureGuide.md +++ b/docs/internal/GeneralArchitectureGuide.md @@ -6,6 +6,66 @@ ## Settings +Elasticsearch supports [cluster-level settings][] and [index-level settings][], configurable via [node-level file settings][] +(e.g. `elasticsearch.yml` file), command-line arguments, and REST APIs. + +### Declaring a Setting + +[cluster-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html +[index-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html +[node-level file settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html + +The [Setting][] class is the building block for Elasticsearch server settings. Each `Setting` can take multiple [Property][] +declarations to define setting characteristics. All setting values first come from the node-local `elasticsearch.yml` file, +if they are set therein, before falling back to the default specified in their `Setting` declaration.
[A setting][] with +`Property.Dynamic` can be updated at runtime, but must be paired with a [local volatile variable like this one][] and +registered in the `ClusterSettings` via a utility like [ClusterSettings#initializeAndWatch()][] to catch and immediately +apply dynamic changes. A common dynamic `Setting` bug is to always read the value directly from [Metadata#settings()][], +which holds the default and dynamically updated values, but _not_ the node-local `elasticsearch.yml` value. The scope of a +Setting must also be declared, such as `Property.IndexScope` for a setting that applies to indexes, or `Property.NodeScope` +for a cluster-level setting. + +[Setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L57-L80 +[Property]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L82 +[A setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L111-L117 +[local volatile variable like this one]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L123 +[ClusterSettings#initializeAndWatch()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L145 +[Metadata#settings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L713-L715 + +[ClusterSettings][] tracks the [core Elasticsearch settings][]. Ultimately the `ClusterSettings` get loaded via the +[SettingsModule][]. Additional settings from the various plugins are [collected during node construction] and passed into the +[SettingsModule constructor][]. The Plugin interface has a [getSettings()][] method via which each plugin can declare additional +settings. + +[ClusterSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L138 +[core Elasticsearch settings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L204-L586 +[SettingsModule]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java#L54 +[collected during node construction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L483 +[SettingsModule constructor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L491-L495 +[getSettings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/plugins/Plugin.java#L203-L208 + +### Dynamically updating a Setting + +Externally, [TransportClusterUpdateSettingsAction][] and [TransportUpdateSettingsAction][] (and the corresponding REST endpoints) +allow users to dynamically change cluster and index settings, respectively. Internally, `AbstractScopedSettings` (parent class +of `ClusterSettings`) has various helper methods to track dynamic changes: it keeps a [registry of `SettingUpdater`][] consumer +lambdas to run updates when settings are changed in the cluster state.
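Putting the declare-and-watch pattern above together, a component with a dynamic, node-scoped setting might look like the following minimal sketch. The setting name and class are made up for illustration, following the `BalancedShardsAllocator` example linked above:

```java
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;

// Hypothetical component following the pattern described above: a dynamic,
// node-scoped setting paired with a volatile field that a watcher keeps fresh.
public class WidgetService {
    // Made-up setting name for illustration.
    public static final Setting<Integer> MAX_WIDGETS_SETTING = Setting.intSetting(
        "my_plugin.max_widgets",
        100,                          // default, used when elasticsearch.yml does not set it
        Setting.Property.Dynamic,     // may be updated at runtime via the settings APIs
        Setting.Property.NodeScope    // cluster-level (node) setting, not per-index
    );

    private volatile int maxWidgets; // always read this field, not Metadata#settings()

    public WidgetService(ClusterSettings clusterSettings) {
        // Initializes the field from the current settings and re-runs the lambda
        // whenever the setting is dynamically updated.
        clusterSettings.initializeAndWatch(MAX_WIDGETS_SETTING, value -> this.maxWidgets = value);
    }

    public int maxWidgets() {
        return maxWidgets;
    }
}
```

Reads then go through the volatile field rather than `Metadata#settings()`, avoiding the pitfall called out above.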
The `ClusterApplierService` [sends setting updates][] +through to the `AbstractScopedSettings`, invoking the consumers registered therein for each updated setting. + +[TransportClusterUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java#L154-L160 +[TransportUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java#L96-L101 +[registry of `SettingUpdater`]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java#L379-L381 +[sends setting updates]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java#L490-L494 + +Index settings are always persisted. They can only be modified on an existing index, and setting values are persisted as part +of the `IndexMetadata`. Cluster settings, however, can be either persisted or transient depending on how they are tied to +[Metadata][] ([applied here][]). Changes to persisted cluster settings will survive a full cluster restart, whereas changes +made to transient cluster settings will reset to their default values, or the `elasticsearch.yml` values, if the cluster +state must ever be reloaded from persisted state. + +[Metadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L212-L213 +[applied here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L2437 + ## Deprecations ## Plugins diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index e5c2db65778d8..9d784f530d63c 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -358,6 +358,8 @@ POST _aliases ---- // TEST[s/^/PUT my-index-2099.05.06-000001\n/] +NOTE: Filters are only applied when using the <>, and are not applied when <>. + [discrete] [[alias-routing]] === Routing diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc index 410bec7ac38ac..6d06e7e6b9045 100644 --- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc +++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc @@ -30,13 +30,13 @@ Returns information about all stored connector sync jobs ordered by their creati (Optional, integer) The offset from the first result to fetch. Defaults to `0`. `status`:: -(Optional, job status) The job status the fetched sync jobs need to have. +(Optional, job status) A comma-separated list of job statuses to filter the results. Available statuses include: `canceling`, `canceled`, `completed`, `error`, `in_progress`, `pending`, `suspended`. `connector_id`:: (Optional, string) The connector id the fetched sync jobs need to have. `job_type`:: -(Optional, job type) A comma-separated list of job types. +(Optional, job type) A comma-separated list of job types. Available job types are: `full`, `incremental` and `access_control`.
[[list-connector-sync-jobs-api-example]] ==== {api-examples-title} diff --git a/docs/reference/data-streams/change-mappings-and-settings.asciidoc b/docs/reference/data-streams/change-mappings-and-settings.asciidoc index 47c3529ceef40..c96f0c7342a96 100644 --- a/docs/reference/data-streams/change-mappings-and-settings.asciidoc +++ b/docs/reference/data-streams/change-mappings-and-settings.asciidoc @@ -602,7 +602,7 @@ stream's oldest backing index. // TESTRESPONSE[s/"index_uuid": "_eEfRrFHS9OyhqWntkgHAQ"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-my-data-stream-2099.03.07-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] // TESTRESPONSE[s/"index_name": ".ds-my-data-stream-2099.03.08-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> First item in the `indices` array for `my-data-stream`. This item contains information about the stream's oldest backing index, diff --git a/docs/reference/data-streams/downsampling-manual.asciidoc b/docs/reference/data-streams/downsampling-manual.asciidoc index 5e0c09f9d2be2..8f6b39d2aa0dd 100644 --- a/docs/reference/data-streams/downsampling-manual.asciidoc +++ b/docs/reference/data-streams/downsampling-manual.asciidoc @@ -389,7 +389,7 @@ This returns: // TESTRESPONSE[s/"ltOJGmqgTVm4T-Buoe7Acg"/$body.data_streams.0.indices.0.index_uuid/] // TESTRESPONSE[s/"2023-07-26T09:26:42.000Z"/$body.data_streams.0.time_series.temporal_ranges.0.start/] // TESTRESPONSE[s/"2023-07-26T13:26:42.000Z"/$body.data_streams.0.time_series.temporal_ranges.0.end/] -// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The backing index for this data stream. Before a backing index can be downsampled, the TSDS needs to be rolled over and diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc index 6bfa9ad9b00c5..b89f55dd41575 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -147,7 +147,7 @@ and that the next generation index will also be managed by {ilm-init}: // TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The name of the backing index. 
<2> For each backing index we display the value of the <> @@ -284,7 +284,7 @@ GET _data_stream/dsl-data-stream // TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The existing backing index will continue to be managed by {ilm-init} <2> The existing backing index will continue to be managed by {ilm-init} @@ -364,7 +364,7 @@ GET _data_stream/dsl-data-stream // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000003"/"index_name": $body.data_streams.0.indices.2.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8abcd1"/"index_uuid": $body.data_streams.0.indices.2.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The backing indices that existed before rollover will continue to be managed by {ilm-init} <2> The backing indices that existed before rollover will continue to be managed by {ilm-init} @@ -462,7 +462,7 @@ GET _data_stream/dsl-data-stream // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000003"/"index_name": $body.data_streams.0.indices.2.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8abcd1"/"index_uuid": $body.data_streams.0.indices.2.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The write index is now managed by {ilm-init} <2> The `lifecycle` configured on the data stream is now disabled. <3> The next write index will be managed by {ilm-init} diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc deleted file mode 100644 index 2d8c0f379c82e..0000000000000 --- a/docs/reference/esql/functions/coalesce.asciidoc +++ /dev/null @@ -1,13 +0,0 @@ -[discrete] -[[esql-coalesce]] -=== `COALESCE` - -*Syntax* - -[source,esql] ----- -COALESCE(expression1 [, ..., expressionN]) ----- -include::parameters/coalesce.asciidoc[] -include::description/coalesce.asciidoc[] -include::examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc index d835a14856c03..081e3b8589dba 100644 --- a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc +++ b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc @@ -15,7 +15,7 @@ manner. 
{esql} supports these conditional functions: * <> // end::cond_list[] -include::case.asciidoc[] -include::coalesce.asciidoc[] -include::greatest.asciidoc[] -include::least.asciidoc[] +include::layout/case.asciidoc[] +include::layout/coalesce.asciidoc[] +include::layout/greatest.asciidoc[] +include::layout/least.asciidoc[] diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index 8ce26eaabe381..eceb6378426a2 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -21,4 +21,4 @@ include::layout/date_extract.asciidoc[] include::layout/date_format.asciidoc[] include::layout/date_parse.asciidoc[] include::layout/date_trunc.asciidoc[] -include::now.asciidoc[] +include::layout/now.asciidoc[] diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc index 5c98a7a2620d0..c3e80301fbc31 100644 --- a/docs/reference/esql/functions/description/case.asciidoc +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -2,4 +2,4 @@ *Description* -Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc index 3c7cfd3bfb14c..ed705d0bbb59e 100644 --- a/docs/reference/esql/functions/description/greatest.asciidoc +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -2,4 +2,6 @@ *Description* -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. + +NOTE: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc index 2aeb1f85aa51a..c5daf0bc79ae0 100644 --- a/docs/reference/esql/functions/description/least.asciidoc +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. diff --git a/docs/reference/esql/functions/description/now.asciidoc b/docs/reference/esql/functions/description/now.asciidoc new file mode 100644 index 0000000000000..4852c98b4980a --- /dev/null +++ b/docs/reference/esql/functions/description/now.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns current date and time. 
diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index 678fde7f5d98b..a2c81b9d24a10 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry contains the second geometry. +Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc index 95ab02a39614a..461dd61daef7a 100644 --- a/docs/reference/esql/functions/description/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns are disjoint. +Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index b736ba29a6c8b..48fd7bdb2f338 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index 890f28cb769b0..38a34f518234a 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry is within the second geometry. +Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc index beb077bea332c..33d867f862429 100644 --- a/docs/reference/esql/functions/description/st_x.asciidoc +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. If the point is of type `geo_point`, this is equivalent to extracting the `longitude` value. diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc index 19c371d2ef931..b03956a51e1a6 100644 --- a/docs/reference/esql/functions/description/st_y.asciidoc +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. If the point is of type `geo_point`, this is equivalent to extracting the `latitude` value.
diff --git a/docs/reference/esql/functions/examples/case.asciidoc b/docs/reference/esql/functions/examples/case.asciidoc new file mode 100644 index 0000000000000..c5c766512ce0b --- /dev/null +++ b/docs/reference/esql/functions/examples/case.asciidoc @@ -0,0 +1,32 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +Determine whether employees are monolingual, bilingual, or polyglot: +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=case] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=case-result] +|=== +Calculate the total connection success rate based on log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate-result] +|=== +Calculate an hourly error rate as a percentage of the total number of log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] +|=== + diff --git a/docs/reference/esql/functions/examples/greatest.asciidoc b/docs/reference/esql/functions/examples/greatest.asciidoc new file mode 100644 index 0000000000000..bd89ad1b3cdd1 --- /dev/null +++ b/docs/reference/esql/functions/examples/greatest.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=greatest] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=greatest-result] +|=== + diff --git a/docs/reference/esql/functions/examples/least.asciidoc b/docs/reference/esql/functions/examples/least.asciidoc new file mode 100644 index 0000000000000..67fc5260f6391 --- /dev/null +++ b/docs/reference/esql/functions/examples/least.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=least] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=least-result] +|=== + diff --git a/docs/reference/esql/functions/examples/now.asciidoc b/docs/reference/esql/functions/examples/now.asciidoc new file mode 100644 index 0000000000000..b8953de93724c --- /dev/null +++ b/docs/reference/esql/functions/examples/now.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNow] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNow-result] +|=== +To retrieve logs from the last hour: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_x.asciidoc b/docs/reference/esql/functions/examples/st_x.asciidoc new file mode 100644 index 0000000000000..895e76c6c04e2 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_x.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_y.asciidoc b/docs/reference/esql/functions/examples/st_y.asciidoc new file mode 100644 index 0000000000000..895e76c6c04e2 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_y.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc deleted file mode 100644 index 003f1f46e6db5..0000000000000 --- a/docs/reference/esql/functions/greatest.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-greatest]] -=== `GREATEST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/greatest.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. - -*Description* - -Returns the maximum value from multiple columns. This is similar to <> -except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the last string - in alphabetical order. When run on `boolean` columns this will return - `true` if any values are `true`. - -include::types/greatest.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=greatest] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=greatest-result] -|=== diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 73bc215ac6ade..5959eed62d37b 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "case", - "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "description" : "Accepts pairs of conditions and values. 
The function returns the value that\nbelongs to the first condition that evaluates to `true`.\n\nIf the number of arguments is odd, the last argument is the default value which\nis returned when no condition matches. If the number of arguments is even, and\nno condition matches, the function returns `null`.", "signatures" : [ { "params" : [ @@ -10,23 +10,226 @@ "name" : "condition", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "boolean", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "cartesian_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "datetime", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "double", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "geo_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "integer", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "ip", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." 
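The odd/even argument rule in the new description deserves a concrete illustration; a minimal sketch reusing the `employees` example from this diff (the `lang_class` column name is ours):

```
FROM employees
| EVAL lang_class = CASE(
    languages <= 1, "monolingual",
    languages <= 2, "bilingual")
| KEEP emp_no, languages, lang_class
// Four arguments (even): rows matching neither condition get lang_class = null.
// Adding a fifth argument such as "polyglot" would make it the default instead.
```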
}, { "name" : "trueValue", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." }, { "name" : "falseValue", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." } ], "variadic" : true, "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "text", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "unsigned_long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "version", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "version" } + ], + "examples" : [ + "FROM employees\n| EVAL type = CASE(\n languages <= 1, \"monolingual\",\n languages <= 2, \"bilingual\",\n \"polyglot\")\n| KEEP emp_no, languages, type", + "FROM sample_data\n| EVAL successful = CASE(\n STARTS_WITH(message, \"Connected to\"), 1,\n message == \"Connection error\", 0\n )\n| STATS success_rate = AVG(successful)", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" ] } diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 87feead06d091..1081b42839577 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." 
}, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -40,7 +40,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -52,13 +52,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -70,7 +70,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -82,13 +82,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -100,7 +100,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -112,13 +112,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -130,7 +130,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -142,13 +142,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index f72f54708c6b1..15c9f58d32d3e 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -2,7 +2,8 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "greatest", - "description" : "Returns the maximum value from many columns.", + "description" : "Returns the maximum value from multiple columns. This is similar to <>\nexcept it is intended to run on multiple columns at once.", + "note" : "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`.", "signatures" : [ { "params" : [ @@ -10,7 +11,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
} ], "variadic" : true, @@ -22,13 +23,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +41,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +59,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -70,13 +71,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +89,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +107,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +119,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +137,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +149,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -166,7 +167,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +179,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +197,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." 
} ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL g = GREATEST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 66efedc0c9fe5..0b922ad6ad3c2 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "least", - "description" : "Returns the minimum value from many columns.", + "description" : "Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +40,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +58,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -70,13 +70,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +88,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +106,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +118,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +136,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +148,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." 
} ], "variadic" : true, @@ -166,7 +166,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +178,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +196,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL l = LEAST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/now.json b/docs/reference/esql/functions/kibana/definition/now.json new file mode 100644 index 0000000000000..9cdb4945afa2e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/now.json @@ -0,0 +1,16 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "now", + "description" : "Returns current date and time.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "datetime" + } + ], + "examples" : [ + "ROW current_date = NOW()", + "FROM sample_data\n| WHERE @timestamp > NOW() - 1 hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json index f4f8003917908..1ef76e46f371a 100644 --- a/docs/reference/esql/functions/kibana/definition/st_contains.json +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_contains", - "description" : "Returns whether the first geometry contains the second geometry.", + "description" : "Returns whether the first geometry contains the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json index 98647b63ff18f..e408a0f98fe6c 100644 --- a/docs/reference/esql/functions/kibana/definition/st_disjoint.json +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_disjoint", - "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "description" : "Returns whether the two geometries or geometry columns are disjoint.\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json index ba619fe57ecf5..2f9f255ab1870 100644 --- a/docs/reference/esql/functions/kibana/definition/st_intersects.json +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_intersects", - "description" : "Returns whether the two geometries or geometry columns intersect.", + "description" : "Returns true if two geometries intersect.\nThey intersect if they have any point in common, including their interior points\n(points along lines or within polygons).\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json index ee98337441ab7..e0cdf62fe0f98 100644 --- a/docs/reference/esql/functions/kibana/definition/st_within.json +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_within", - "description" : "Returns whether the first geometry is within the second geometry.", + "description" : "Returns whether the first geometry is within the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json index 57598b3470e11..c3554a2ee808b 100644 --- a/docs/reference/esql/functions/kibana/definition/st_x.json +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_x", - "description" : "Extracts the x-coordinate from a point geometry.", + "description" : "Extracts the `x` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json index 0dacaa56bb8de..2966ae04f75e4 100644 --- a/docs/reference/esql/functions/kibana/definition/st_y.json +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_y", - "description" : "Extracts the y-coordinate from a point geometry.", + "description" : "Extracts the `y` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md index e1494a5c2af8c..8bb31ee972759 100644 --- a/docs/reference/esql/functions/kibana/docs/case.md +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -3,6 +3,18 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### CASE -Accepts pairs of conditions and values. -The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that +belongs to the first condition that evaluates to `true`. +If the number of arguments is odd, the last argument is the default value which +is returned when no condition matches. If the number of arguments is even, and +no condition matches, the function returns `null`. + +``` +FROM employees +| EVAL type = CASE( + languages <= 1, "monolingual", + languages <= 2, "bilingual", + "polyglot") +| KEEP emp_no, languages, type +``` diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md index 3db0c9ed87aa5..4b3b4027381f8 100644 --- a/docs/reference/esql/functions/kibana/docs/greatest.md +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -3,5 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### GREATEST -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <<esql-mv_max>> +except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL g = GREATEST(a, b) +``` +Note: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md index ff2c19592c8e1..7bbbcf79bc374 100644 --- a/docs/reference/esql/functions/kibana/docs/least.md +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -3,5 +3,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### LEAST -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <<esql-mv_min>> except it is intended to run on multiple columns at once.
+``` +ROW a = 10, b = 20 +| EVAL l = LEAST(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/now.md b/docs/reference/esql/functions/kibana/docs/now.md new file mode 100644 index 0000000000000..5143dc843ebd8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/now.md @@ -0,0 +1,10 @@ + + +### NOW +Returns current date and time. + +``` +ROW current_date = NOW() +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md index 6e23bb9b0f116..99f3a19f9df41 100644 --- a/docs/reference/esql/functions/kibana/docs/st_contains.md +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_CONTAINS Returns whether the first geometry contains the second geometry. +This is the inverse of the <<esql-st_within,ST_WITHIN>> function. ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md index 7cf66b168bd70..4b42954efa5c1 100644 --- a/docs/reference/esql/functions/kibana/docs/st_disjoint.md +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -4,6 +4,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_DISJOINT Returns whether the two geometries or geometry columns are disjoint. +This is the inverse of the <<esql-st_intersects,ST_INTERSECTS>> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md index e4db33429dbe3..b0a58b3ab2357 100644 --- a/docs/reference/esql/functions/kibana/docs/st_intersects.md +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -3,7 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_INTERSECTS -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. +They intersect if they have any point in common, including their interior points +(points along lines or within polygons). +This is the inverse of the <<esql-st_disjoint,ST_DISJOINT>> function. +In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ ``` FROM airports diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md index cbb3ae5ee9aca..9ef046e5006f6 100644 --- a/docs/reference/esql/functions/kibana/docs/st_within.md +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_WITHIN Returns whether the first geometry is within the second geometry. +This is the inverse of the <<esql-st_contains,ST_CONTAINS>> function. ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md index af2f4de1487cd..b113f19e1c76c 100644 --- a/docs/reference/esql/functions/kibana/docs/st_x.md +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_X -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `longitude` value.
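WKT writes points as `POINT(x y)`, so `ST_X` reads back the first coordinate and `ST_Y` the second; for a `geo_point` that is longitude, then latitude. Using the example point from this diff:

```
ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)")
| EVAL lon = ST_X(point), lat = ST_Y(point)
// lon == 42.97109629958868, lat == 14.7552534006536
```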
+``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md index 575a5bd3c7d33..db88c3ada63bb 100644 --- a/docs/reference/esql/functions/kibana/docs/st_y.md +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_Y -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. +``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc index 192e74522b8d3..edfc768dc7055 100644 --- a/docs/reference/esql/functions/layout/case.asciidoc +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/case.svg[Embedded,opts=inline] include::../parameters/case.asciidoc[] include::../description/case.asciidoc[] include::../types/case.asciidoc[] +include::../examples/case.asciidoc[] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc index 1ff17f3c3adfe..fff9a32412947 100644 --- a/docs/reference/esql/functions/layout/greatest.asciidoc +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/greatest.svg[Embedded,opts=inline] include::../parameters/greatest.asciidoc[] include::../description/greatest.asciidoc[] include::../types/greatest.asciidoc[] +include::../examples/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc index a14a166c8bfe4..0daee9c181a65 100644 --- a/docs/reference/esql/functions/layout/least.asciidoc +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/least.svg[Embedded,opts=inline] include::../parameters/least.asciidoc[] include::../description/least.asciidoc[] include::../types/least.asciidoc[] +include::../examples/least.asciidoc[] diff --git a/docs/reference/esql/functions/layout/now.asciidoc b/docs/reference/esql/functions/layout/now.asciidoc new file mode 100644 index 0000000000000..52341c1665619 --- /dev/null +++ b/docs/reference/esql/functions/layout/now.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-now]] +=== `NOW` + +*Syntax* + +[.text-center] +image::esql/functions/signature/now.svg[Embedded,opts=inline] + +include::../parameters/now.asciidoc[] +include::../description/now.asciidoc[] +include::../types/now.asciidoc[] +include::../examples/now.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc index ce3824aa157b1..2c2dc191a31a4 100644 --- a/docs/reference/esql/functions/layout/st_x.asciidoc +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_x.svg[Embedded,opts=inline] include::../parameters/st_x.asciidoc[] include::../description/st_x.asciidoc[] include::../types/st_x.asciidoc[] +include::../examples/st_x.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc index 702e9097ae689..0708465760bb3 100644 --- a/docs/reference/esql/functions/layout/st_y.asciidoc +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_y.svg[Embedded,opts=inline] include::../parameters/st_y.asciidoc[] include::../description/st_y.asciidoc[] include::../types/st_y.asciidoc[] +include::../examples/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc deleted file mode 100644 index 2860eb31090c4..0000000000000 --- a/docs/reference/esql/functions/least.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-least]] -=== `LEAST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/least.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. - -*Description* - -Returns the minimum value from multiple columns. This is similar to -<> except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the first string - in alphabetical order. When run on `boolean` columns this will return - `false` if any values are `false`. - -include::types/least.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=least] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=least-result] -|=== diff --git a/docs/reference/esql/functions/now.asciidoc b/docs/reference/esql/functions/now.asciidoc deleted file mode 100644 index 3c46f557acd1f..0000000000000 --- a/docs/reference/esql/functions/now.asciidoc +++ /dev/null @@ -1,28 +0,0 @@ -[discrete] -[[esql-now]] -=== `NOW` - -*Syntax* - -[source,esql] ----- -NOW() ----- - -*Description* - -Returns current date and time. - -*Example* - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNow] ----- - -To retrieve logs from the last hour: - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNowWhere] ----- \ No newline at end of file diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index c3617b7c0e32c..ee6f7e499b3b3 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -3,7 +3,7 @@ *Parameters* `condition`:: - +A condition. `trueValue`:: - +The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches. 
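The default-value behavior described for `trueValue` above is easiest to see in a small query. A minimal ESQL sketch, assuming the `employees` test data set with an integer `languages` field (both names are illustrative):

[source,esql]
----
FROM employees
| EVAL lang_type = CASE(
    languages <= 1, "monolingual",
    languages <= 2, "bilingual",
    "polyglot")
| KEEP emp_no, languages, lang_type
----

The trailing `"polyglot"` argument has no paired condition, so it acts as the default and is returned whenever neither condition matches.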
diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 9b62a2e7e0d87..e0860c5bc3030 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: -Expression to evaluate +Expression to evaluate. `rest`:: -Other expression to evaluate +Other expression to evaluate. diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc index 83ac29d0bf7c9..8d23101aba7f3 100644 --- a/docs/reference/esql/functions/parameters/greatest.asciidoc +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc index 83ac29d0bf7c9..8d23101aba7f3 100644 --- a/docs/reference/esql/functions/parameters/least.asciidoc +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. diff --git a/docs/reference/esql/functions/parameters/now.asciidoc b/docs/reference/esql/functions/parameters/now.asciidoc new file mode 100644 index 0000000000000..25b3c973f1a26 --- /dev/null +++ b/docs/reference/esql/functions/parameters/now.asciidoc @@ -0,0 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_contains.asciidoc +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
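A minimal sketch of the coordinate-system rule spelled out above, using illustrative literal geometries. Both arguments below are `geo_*` types, so the combination type-checks; swapping either one for a `cartesian_*` value would be rejected at analysis time:

[source,esql]
----
ROW pt = TO_GEOPOINT("POINT(1 1)")
| EVAL disjoint = ST_DISJOINT(pt, TO_GEOSHAPE("POLYGON((10 10, 20 10, 20 20, 10 20, 10 10))"))
----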
diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_intersects.asciidoc +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_within.asciidoc +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc index 4e8e77dea1f86..b66bfc286a443 100644 --- a/docs/reference/esql/functions/parameters/st_x.asciidoc +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc index 4e8e77dea1f86..b66bfc286a443 100644 --- a/docs/reference/esql/functions/parameters/st_y.asciidoc +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. 
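Since the parameter accepts either point type, the same expression works unchanged for cartesian data, and a `null` input simply yields a `null` result. A small sketch with an illustrative literal:

[source,esql]
----
ROW p = TO_CARTESIANPOINT("POINT(4297.11 -1475.53)")
| EVAL x = ST_X(p), y = ST_Y(p)
----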
diff --git a/docs/reference/esql/functions/signature/now.svg b/docs/reference/esql/functions/signature/now.svg new file mode 100644 index 0000000000000..2cd48ac561408 --- /dev/null +++ b/docs/reference/esql/functions/signature/now.svg @@ -0,0 +1 @@ +NOW() \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index b6d178ddd624d..d143681fcf2f2 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -16,9 +16,9 @@ * experimental:[] <> // end::spatial_list[] -include::st_intersects.asciidoc[] -include::st_disjoint.asciidoc[] -include::st_contains.asciidoc[] -include::st_within.asciidoc[] -include::st_x.asciidoc[] -include::st_y.asciidoc[] +include::layout/st_intersects.asciidoc[] +include::layout/st_disjoint.asciidoc[] +include::layout/st_contains.asciidoc[] +include::layout/st_within.asciidoc[] +include::layout/st_x.asciidoc[] +include::layout/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc deleted file mode 100644 index 110c4fe4ca9ec..0000000000000 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_contains]] -=== `ST_CONTAINS` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_contains.asciidoc[] -This is the inverse of the <> function. - -include::types/st_contains.asciidoc[] -include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc deleted file mode 100644 index db89ca186a0ff..0000000000000 --- a/docs/reference/esql/functions/st_disjoint.asciidoc +++ /dev/null @@ -1,27 +0,0 @@ -[discrete] -[[esql-st_disjoint]] -=== `ST_DISJOINT` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_disjoint.asciidoc[] -This is the inverse of the <> function. 
-In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ - -include::types/st_disjoint.asciidoc[] -include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc deleted file mode 100644 index d75a7f3a50e0f..0000000000000 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-st_intersects]] -=== `ST_INTERSECTS` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -*Description* - -Returns true if two geometries intersect. -They intersect if they have any point in common, including their interior points -(points along lines or within polygons). -This is the inverse of the <> function. -In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ - -include::types/st_intersects.asciidoc[] -include::examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc deleted file mode 100644 index 0f0190a9de638..0000000000000 --- a/docs/reference/esql/functions/st_within.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_within]] -=== `ST_WITHIN` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_within.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_within.asciidoc[] -This is the inverse of the <> function. - -include::types/st_within.asciidoc[] -include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc deleted file mode 100644 index eec48894b5150..0000000000000 --- a/docs/reference/esql/functions/st_x.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_x]] -=== `ST_X` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_x.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. - -*Description* - -Extracts the `x` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. 
- -include::types/st_x.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc deleted file mode 100644 index 8fc7281e395d2..0000000000000 --- a/docs/reference/esql/functions/st_y.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_y]] -=== `ST_Y` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_y.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. - -*Description* - -Extracts the `y` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. - -include::types/st_y.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index e7d627ab915a1..85e4193b5bf2f 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -5,5 +5,15 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== condition | trueValue | result -keyword +boolean | boolean | boolean +boolean | cartesian_point | cartesian_point +boolean | datetime | datetime +boolean | double | double +boolean | geo_point | geo_point +boolean | integer | integer +boolean | ip | ip +boolean | long | long +boolean | text | text +boolean | unsigned_long | unsigned_long +boolean | version | version |=== diff --git a/docs/reference/esql/functions/types/now.asciidoc b/docs/reference/esql/functions/types/now.asciidoc new file mode 100644 index 0000000000000..5737d98f2f7db --- /dev/null +++ b/docs/reference/esql/functions/types/now.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +result +datetime +|=== diff --git a/docs/reference/esql/processing-commands/enrich.asciidoc b/docs/reference/esql/processing-commands/enrich.asciidoc index f73eea6018cbc..5470d81b2f40b 100644 --- a/docs/reference/esql/processing-commands/enrich.asciidoc +++ b/docs/reference/esql/processing-commands/enrich.asciidoc @@ -57,11 +57,11 @@ in this example). 
`ENRICH` will look for records in the [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich] +include::{esql-specs}/enrich.csv-spec[tag=enrich] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich-result] |=== To use a column with a different name than the `match_field` defined in the @@ -69,11 +69,11 @@ policy as the match field, use `ON `: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on-result] |=== By default, each of the enrich fields defined in the policy is added as a @@ -82,22 +82,22 @@ column. To explicitly select the enrich fields that are added, use [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with-result] |=== You can rename the columns that are added using `WITH new_name=`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename-result] |=== In case of name collisions, the newly created columns will override existing diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc index 3f8e19b47d37a..6c17a494f36ae 100644 --- a/docs/reference/high-availability/cluster-design.asciidoc +++ b/docs/reference/high-availability/cluster-design.asciidoc @@ -7,14 +7,14 @@ nodes to take over their responsibilities, an {es} cluster can continue operating normally if some of its nodes are unavailable or disconnected. There is a limit to how small a resilient cluster can be. All {es} clusters -require: +require the following components to function: -- One <> node -- At least one node for each <>. -- At least one copy of every <>. +- One <> +- At least one node for each <> +- At least one copy of every <> A resilient cluster requires redundancy for every required cluster component. -This means a resilient cluster must have: +This means a resilient cluster must have the following components: - At least three master-eligible nodes - At least two nodes of each role @@ -375,11 +375,11 @@ The cluster will be resilient to the loss of any zone as long as: - There are at least two zones containing data nodes. - Every index that is not a <> has at least one replica of each shard, in addition to the primary. -- Shard allocation awareness is configured to avoid concentrating all copies of - a shard within a single zone. +- <> is configured to + avoid concentrating all copies of a shard within a single zone. - The cluster has at least three master-eligible nodes. At least two of these - nodes are not voting-only master-eligible nodes, and they are spread evenly - across at least three zones. 
+ nodes are not <>, + and they are spread evenly across at least three zones. - Clients are configured to send their requests to nodes in more than one zone or are configured to use a load balancer that balances the requests across an appropriate set of nodes. The {ess-trial}[Elastic Cloud] service provides such diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png new file mode 100644 index 0000000000000..d5a3040cc5343 Binary files /dev/null and b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png differ diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png new file mode 100644 index 0000000000000..ce2ce6b2a95e9 Binary files /dev/null and b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png differ diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc index 240a33164b379..0a318cd135914 100644 --- a/docs/reference/indices/get-data-stream.asciidoc +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -358,4 +358,4 @@ The API returns the following response: // TESTRESPONSE[s/"index_name": ".ds-my-data-stream-two-2099.03.08-000001"/"index_name": $body.data_streams.1.indices.0.index_name/] // TESTRESPONSE[s/"index_uuid": "3liBu2SYS5axasRt6fUIpA"/"index_uuid": $body.data_streams.1.indices.0.index_uuid/] // TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] -// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 5d6ede6acd5ac..6272f4529c5f9 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -11,6 +11,8 @@ independently of each other. TIP: When ingesting key-value pairs with a large, arbitrary set of keys, you might consider modeling each key-value pair as its own nested document with `key` and `value` fields. Instead, consider using the <> data type, which maps an entire object as a single field and allows for simple searches over its contents. Nested documents and queries are typically expensive, so using the `flattened` data type for this use case is a better option. +WARNING: Nested fields have incomplete support in Kibana. While they are visible and searchable in Discover, they cannot be used to build visualizations in Lens. + [[nested-arrays-flattening-objects]] ==== How arrays of objects are flattened diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index d447026fae293..9c6197f9ba40d 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -5,7 +5,7 @@ You can use custom node attributes as _awareness attributes_ to enable {es} to take your physical hardware configuration into account when allocating shards. 
If {es} knows which nodes are on the same physical server, in the same rack, or in the same zone, it can distribute the primary shard and its replica shards to -minimise the risk of losing all shard copies in the event of a failure. +minimize the risk of losing all shard copies in the event of a failure. When shard allocation awareness is enabled with the <> @@ -19,22 +19,27 @@ allocated in each location. If the number of nodes in each location is unbalanced and there are a lot of replicas, replica shards might be left unassigned. +TIP: Learn more about <>. + [[enabling-awareness]] ===== Enabling shard allocation awareness To enable shard allocation awareness: -. Specify the location of each node with a custom node attribute. For example, -if you want Elasticsearch to distribute shards across different racks, you might -set an awareness attribute called `rack_id` in each node's `elasticsearch.yml` -config file. +. Specify the location of each node with a custom node attribute. For example, +if you want Elasticsearch to distribute shards across different racks, you might +use an awareness attribute called `rack_id`. ++ +You can set custom attributes in two ways: + +- By editing the `elasticsearch.yml` config file: + [source,yaml] -------------------------------------------------------- node.attr.rack_id: rack_one -------------------------------------------------------- + -You can also set custom attributes when you start a node: +- Using the `-E` command line argument when you start a node: + [source,sh] -------------------------------------------------------- @@ -56,17 +61,33 @@ cluster.routing.allocation.awareness.attributes: rack_id <1> + You can also use the <> API to set or update -a cluster's awareness attributes. +a cluster's awareness attributes: ++ +[source,console] +-------------------------------------------------- +PUT /_cluster/settings +{ + "persistent" : { + "cluster.routing.allocation.awareness.attributes" : "rack_id" + } +} +-------------------------------------------------- With this example configuration, if you start two nodes with `node.attr.rack_id` set to `rack_one` and create an index with 5 primary shards and 1 replica of each primary, all primaries and replicas are -allocated across the two nodes. +allocated across the two nodes. + +.All primaries and replicas allocated across two nodes in the same rack +image::images/shard-allocation/shard-allocation-awareness-one-rack.png[All primaries and replicas are allocated across two nodes in the same rack] If you add two nodes with `node.attr.rack_id` set to `rack_two`, {es} moves shards to the new nodes, ensuring (if possible) that no two copies of the same shard are in the same rack. +.Primaries and replicas allocated across four nodes in two racks, with no two copies of the same shard in the same rack +image::images/shard-allocation/shard-allocation-awareness-two-racks.png[Primaries and replicas are allocated across four nodes in two racks with no two copies of the same shard in the same rack] + If `rack_two` fails and takes down both its nodes, by default {es} allocates the lost shard copies to nodes in `rack_one`.
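One way to confirm the distribution described here is to list each shard copy together with the node it was allocated to; a quick check using the cat shards API (works for any index):

[source,console]
--------------------------------------------------
GET _cat/shards?v=true&h=index,shard,prirep,node
--------------------------------------------------

With awareness enabled, the primary (`p`) and replica (`r`) rows for the same shard should never report two nodes from the same rack.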
To prevent multiple copies of a particular shard from being allocated in the same location, you can diff --git a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc index 848a29c64279c..2308ec259da48 100644 --- a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc @@ -59,35 +59,40 @@ you configure the remotes. `cluster.remote..transport.compress`:: - Per cluster setting that enables you to configure compression for requests - to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If the inbound request is compressed, - Elasticsearch compresses the response. The setting options are `true`, - `indexing_data`, and `false`. If unset, the global `transport.compress` is - used as the fallback setting. + Per-cluster setting that enables you to configure compression for requests to + a specific remote cluster. The handling cluster will automatically compress + responses to compressed requests. The setting options are `true`, + `indexing_data`, and `false`. If unset, defaults to the behaviour specified + by the node-wide `transport.compress` setting. See the + <> for further information. `cluster.remote..transport.compression_scheme`:: - Per cluster setting that enables you to configure compression scheme for - requests to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If an inbound request is compressed, {es} - compresses the response using the same compression scheme. The setting options - are `deflate` and `lz4`. If unset, the global `transport.compression_scheme` - is used as the fallback setting. + Per-cluster setting that enables you to configure the compression scheme for + requests to a specific cluster if those requests are selected to be + compressed by the `cluster.remote..transport.compress` + setting. The handling cluster will automatically use the same compression + scheme for responses as for the corresponding requests. The setting options + are `deflate` and `lz4`. If unset, defaults to the behaviour specified by the + node-wide `transport.compression_scheme` setting. See the + <> for further information. - -`cluster.remote..credentials` (<>, <>):: [[remote-cluster-credentials-setting]] - - Per cluster setting for configuring <>. - This setting takes the encoded value of a - <> and must be set - in the <> on each node in the cluster. - The presence (or not) of this setting determines which model a remote cluster uses. - If present, the remote cluster uses the API key based model. - Otherwise, it uses the certificate based model. - If the setting is added, removed, or updated in the <> and reloaded via the - <> API, the cluster will automatically rebuild its connection to the remote. +`cluster.remote..credentials`:: + + (<>, <>) + Per-cluster setting for configuring <>. This setting takes the encoded value of a + <> and must + be set in the <> on each node in the cluster. + The presence (or not) of this setting determines which model a remote cluster + uses. If present, the remote cluster uses the API key based model. Otherwise, + it uses the certificate based model. If the setting is added, removed, or + updated in the <> and reloaded via the + <> API, the cluster will automatically + rebuild its connection to the remote.
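As a concrete illustration of the two per-cluster compression settings described above, both can be updated dynamically like any other cluster setting; the `cluster_one` alias is hypothetical:

[source,console]
--------------------------------------------------
PUT _cluster/settings
{
  "persistent": {
    "cluster.remote.cluster_one.transport.compress": "indexing_data",
    "cluster.remote.cluster_one.transport.compression_scheme": "lz4"
  }
}
--------------------------------------------------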
[[remote-cluster-sniff-settings]] ==== Sniff mode remote cluster settings diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 2ec574544f9bb..d08da2cfc1d2f 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -47,20 +47,44 @@ different from `transport.port`. Defaults to the port assigned via The connect timeout for initiating a new connection (in time setting format). Defaults to `30s`. +[[transport-settings-compress]] `transport.compress`:: (<>, string) -Set to `true`, `indexing_data`, or `false` to configure transport compression -between nodes. The option `true` will compress all data. The option -`indexing_data` will compress only the raw index data sent between nodes during -ingest, ccr following (excluding bootstrap), and operations based shard recovery -(excluding transferring lucene files). Defaults to `indexing_data`. +Determines which transport requests are compressed before sending them to +another node. {es} will compress transport responses if and only if the +corresponding request was compressed. See also `transport.compression_scheme`, +which specifies the compression scheme which is used. Accepts the following +values: ++ +-- +`false`:: + +No transport requests are compressed. This option uses the most network +bandwidth, but avoids the CPU overhead of compression and decompression. + +`indexing_data`:: + +Compresses only the raw indexing data sent between nodes during ingest, CCR +following (excluding bootstrapping) and operations-based shard recovery +(excluding file-based recovery which copies the raw Lucene data). This option +is a good trade-off between network bandwidth savings and the extra CPU +required for compression and decompression. This option is the default. + +`true`:: + +All transport requests are compressed. This option may perform better than +`indexing_data` in terms of network bandwidth, but will require the most CPU +for compression and decompression work. +-- +[[transport-settings-compression-scheme]] `transport.compression_scheme`:: (<>, string) -Configures the compression scheme for `transport.compress`. The options are -`deflate` or `lz4`. If `lz4` is configured and the remote node has not been -upgraded to a version supporting `lz4`, the traffic will be sent uncompressed. -Defaults to `lz4`. +Configures the compression scheme for requests which are selected for +compression by the `transport.compress` setting. Accepts either `deflate` or +`lz4`, which offer different trade-offs between compression ratio and CPU +usage. {es} will use the same compression scheme for responses as for the +corresponding requests. Defaults to `lz4`. `transport.tcp.keep_alive`:: (<>, boolean) diff --git a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc index cfad434b890db..0db395ba34b0a 100644 --- a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc +++ b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc @@ -10,8 +10,6 @@ The instructions on this page are for *local development only*. Do not use these instructions for production deployments, because they are not secure. While this approach is convenient for experimenting and learning, you should never run the service in this way in a production environment.
- -Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a production environment, including using Docker. ==== The following commands help you very quickly spin up a single-node {es} cluster, together with {kib} in Docker. @@ -174,4 +172,4 @@ Use our <> to learn the basics of {es}: how t This setup is not suitable for production use. For production deployments, we recommend using our managed service on Elastic Cloud. https://cloud.elastic.co/registration[Sign up for a free trial] (no credit card required). -Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. \ No newline at end of file +Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index dd264c0e5bcd2..a2a397c4efe65 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1062,8 +1062,8 @@ end::stats[] tag::stored_fields[] `stored_fields`:: -(Optional, Boolean) If `true`, retrieves the document fields stored in the -index rather than the document `_source`. Defaults to `false`. +(Optional, string) +A comma-separated list of <> to include in the response. end::stored_fields[] tag::sync[] diff --git a/docs/reference/rest-api/security/create-roles.asciidoc b/docs/reference/rest-api/security/create-roles.asciidoc index 4f41c0b54bb1d..75f1d7c799187 100644 --- a/docs/reference/rest-api/security/create-roles.asciidoc +++ b/docs/reference/rest-api/security/create-roles.asciidoc @@ -50,6 +50,9 @@ privilege or action. `cluster`:: (list) A list of cluster privileges. These privileges define the cluster level actions that users with this role are able to execute. +`description`:: (string) A description of the role. +The maximum length is `1000` chars. + `global`:: (object) An object defining global privileges. A global privilege is a form of cluster privilege that is request-aware. Support for global privileges is currently limited to the management of application privileges. 
@@ -104,6 +107,7 @@ The following example adds a role called `my_admin_role`: -------------------------------------------------- POST /_security/role/my_admin_role { + "description": "Grants full access to all management features within the cluster.", "cluster": ["all"], "indices": [ { diff --git a/docs/reference/rest-api/security/get-roles.asciidoc b/docs/reference/rest-api/security/get-roles.asciidoc index 80f0fd587aae8..3eb5a735194c6 100644 --- a/docs/reference/rest-api/security/get-roles.asciidoc +++ b/docs/reference/rest-api/security/get-roles.asciidoc @@ -61,6 +61,7 @@ GET /_security/role/my_admin_role -------------------------------------------------- { "my_admin_role": { + "description": "Grants full access to all management features within the cluster.", "cluster" : [ "all" ], "indices" : [ { diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc new file mode 100644 index 0000000000000..f12f23ad2c5dc --- /dev/null +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -0,0 +1,372 @@ +[[cohere-es]] +=== Tutorial: Using Cohere with {es} +++++ +Using Cohere with {es} +++++ + +The instructions in this tutorial show you how to compute embeddings with +Cohere using the {infer} API and store them for efficient vector or hybrid +search in {es}. This tutorial will use the Python {es} client to perform the +operations. + +You'll learn how to: + +* create an {infer} endpoint for text embedding using the Cohere service, +* create the necessary index mapping for the {es} index, +* build an {infer} pipeline to ingest documents into the index together with the +embeddings, +* perform hybrid search on the data, +* rerank search results by using Cohere's rerank model, +* design a RAG system with Cohere's Chat API. + +The tutorial uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set. + +Refer to https://docs.cohere.com/docs/elasticsearch-and-cohere[Cohere's tutorial] +for an example using a different data set. + + +[discrete] +[[cohere-es-req]] +==== Requirements + +* A https://cohere.com/[Cohere account], +* an https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[Elastic Cloud] +account, +* Python 3.7 or higher. + + +[discrete] +[[cohere-es-packages]] +==== Install required packages + +Install the {es} and Cohere Python clients: + +[source,py] +------------------------------------------------------------ +!pip install elasticsearch +!pip install cohere +------------------------------------------------------------ + +Import the required packages: + +[source,py] +------------------------------------------------------------ +from elasticsearch import Elasticsearch, helpers +import cohere +import json +import requests +------------------------------------------------------------ + +[discrete] +[[cohere-es-client]] +==== Create the {es} client + +To create your {es} client, you need: + +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key].
+ +[source,py] +------------------------------------------------------------ +ELASTICSEARCH_ENDPOINT = "elastic_endpoint" +ELASTIC_API_KEY = "elastic_api_key" + +client = Elasticsearch( + cloud_id=ELASTICSEARCH_ENDPOINT, + api_key=ELASTIC_API_KEY +) + +# Confirm the client has connected +print(client.info()) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-endpoint]] +==== Create the {infer} endpoint + +<> first. In this example, the +{infer} endpoint uses Cohere's `embed-english-v3.0` model and the +`embedding_type` is set to `byte`. + +[source,py] +------------------------------------------------------------ +COHERE_API_KEY = "cohere_api_key" + +client.inference.put_model( + task_type="text_embedding", + inference_id="cohere_embeddings", + body={ + "service": "cohere", + "service_settings": { + "api_key": COHERE_API_KEY, + "model_id": "embed-english-v3.0", + "embedding_type": "byte" + } + }, +) +------------------------------------------------------------ + +You can find your API keys in your Cohere dashboard under the +https://dashboard.cohere.com/api-keys[API keys section]. + + +[discrete] +[[cohere-es-index-mapping]] +==== Create the index mapping + +Create the index mapping for the index that will contain the embeddings. + +[source,py] +------------------------------------------------------------ +client.indices.create( + index="cohere-embeddings", + settings={"index": {"default_pipeline": "cohere_embeddings"}}, + mappings={ + "properties": { + "text_embedding": { + "type": "dense_vector", + "dims": 1024, + "element_type": "byte", + }, + "text": {"type": "text"}, + "id": {"type": "integer"}, + "title": {"type": "text"} + } + }, +) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-pipeline]] +==== Create the {infer} pipeline + +Now you have an {infer} endpoint and an index ready to store embeddings. The +next step is to create an <> with an +<> that will create the embeddings using +the {infer} endpoint and store them in the index. + +[source,py] +-------------------------------------------------- +client.ingest.put_pipeline( + id="cohere_embeddings", + description="Ingest pipeline for Cohere inference.", + processors=[ + { + "inference": { + "model_id": "cohere_embeddings", + "input_output": { + "input_field": "text", + "output_field": "text_embedding", + }, + } + } + ], +) +-------------------------------------------------- + + +[discrete] +[[cohere-es-insert-documents]] +==== Prepare data and insert documents + +This example uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set that you can find on HuggingFace. + +[source,py] +-------------------------------------------------- +url = 'https://huggingface.co/datasets/mteb/scifact/raw/main/corpus.jsonl' + +# Fetch the JSONL data from the URL +response = requests.get(url) +response.raise_for_status() # Raise an exception for bad responses + +# Split the content by new lines and parse each line as JSON +data = [json.loads(line) for line in response.text.strip().split('\n') if line] +# Now data is a list of dictionaries + +# Change `_id` key to `id` as `_id` is a reserved key in Elasticsearch.
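+# (If `_id` stayed inside `_source`, the bulk step below would fail, because
+# Elasticsearch rejects metadata fields such as `_id` inside a document body.)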
+for item in data: + if '_id' in item: + item['id'] = item.pop('_id') + +# Prepare the documents to be indexed +documents = [] +for line in data: + data_dict = line + documents.append({ + "_index": "cohere-embeddings", + "_source": data_dict, + } + ) + +# Use the bulk endpoint to index +helpers.bulk(client, documents) + +print("Data ingestion completed, text embeddings generated!") +-------------------------------------------------- + +Your index is populated with the SciFact data and text embeddings for the text +field. + + +[discrete] +[[cohere-es-hybrid-search]] +==== Hybrid search + +Let's start querying the index! + +The code below performs a hybrid search. The `kNN` query computes the relevance +of search results based on vector similarity using the `text_embedding` field, +while the lexical search query uses BM25 retrieval to compute keyword similarity on +the `title` and `text` fields. + +[source,py] +-------------------------------------------------- +query = "What is biosimilarity?" + +response = client.search( + index="cohere-embeddings", + size=100, + knn={ + "field": "text_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "cohere_embeddings", + "model_text": query, + } + }, + "k": 10, + "num_candidates": 50, + }, + query={ + "multi_match": { + "query": query, + "fields": ["text", "title"] + } + } +) + +raw_documents = response["hits"]["hits"] + +# Display the first 10 results +for document in raw_documents[0:10]: + print(f'Title: {document["_source"]["title"]}\nText: {document["_source"]["text"]}\n') + +# Format the documents for ranking +documents = [] +for hit in response["hits"]["hits"]: + documents.append(hit["_source"]["text"]) +-------------------------------------------------- + + +[discrete] +[[cohere-es-rerank-results]] +===== Rerank search results + +To combine the results more effectively, use +https://docs.cohere.com/docs/rerank-2[Cohere's Rerank v3] model through the +{infer} API to provide a more precise semantic reranking of the results. + +Create an {infer} endpoint with your Cohere API key and the name of the model to use as +the `model_id` (`rerank-english-v3.0` in this example). + +[source,py] +-------------------------------------------------- +client.inference.put_model( + task_type="rerank", + inference_id="cohere_rerank", + body={ + "service": "cohere", + "service_settings":{ + "api_key": COHERE_API_KEY, + "model_id": "rerank-english-v3.0" + }, + "task_settings": { + "top_n": 10, + }, + } +) +-------------------------------------------------- + +Rerank the results using the new {infer} endpoint. + +[source,py] +-------------------------------------------------- +# Pass the query and the search results to the service +response = client.inference.inference( + inference_id="cohere_rerank", + body={ + "query": query, + "input": documents, + "task_settings": { + "return_documents": False + } + } +) + +# Reconstruct the input documents based on the index provided in the rerank response +ranked_documents = [] +for document in response.body["rerank"]: + ranked_documents.append({ + "title": raw_documents[int(document["index"])]["_source"]["title"], + "text": raw_documents[int(document["index"])]["_source"]["text"] + }) + +# Print the top 10 results +for document in ranked_documents[0:10]: + print(f"Title: {document['title']}\nText: {document['text']}\n") +-------------------------------------------------- + +The response is a list of documents in descending order of relevance.
Each +document has a corresponding index that reflects the order of the documents when +they were sent to the {infer} endpoint. + + +[discrete] +[[cohere-es-rag]] +==== Retrieval Augmented Generation (RAG) with Cohere and {es} + +RAG is a method for generating text using additional information fetched from an +external data source. With the ranked results, you can build a RAG system on +top of what you previously created by using +https://docs.cohere.com/docs/chat-api[Cohere's Chat API]. + +Pass in the retrieved documents and the query to receive a grounded response +using Cohere's newest generative model +https://docs.cohere.com/docs/command-r-plus[Command R+]. + +Then pass in the query and the documents to the Chat API, and print out the +response. + +[source,py] +-------------------------------------------------- +co = cohere.Client(COHERE_API_KEY)  # initialize the Cohere client; it was not created earlier in this tutorial + +response = co.chat(message=query, documents=ranked_documents, model='command-r-plus') + +source_documents = [] +for citation in response.citations: + for document_id in citation.document_ids: + if document_id not in source_documents: + source_documents.append(document_id) + +print(f"Query: {query}") +print(f"Response: {response.text}") +print("Sources:") +for document in response.documents: + if document['id'] in source_documents: + print(f"{document['title']}: {document['text']}") + +-------------------------------------------------- + +The response will look similar to this: + +[source,console-result] +-------------------------------------------------- +Query: What is biosimilarity? +Response: Biosimilarity is based on the comparability concept, which has been used successfully for several decades to ensure close similarity of a biological product before and after a manufacturing change. Over the last 10 years, experience with biosimilars has shown that even complex biotechnology-derived proteins can be copied successfully. +Sources: +Interchangeability of Biosimilars: A European Perspective: (...) +-------------------------------------------------- +// NOTCONSOLE diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index a4d892c98645b..a1197e7bbbd3a 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -136,3 +136,4 @@ include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[ include::semantic-search-elser.asciidoc[] include::semantic-search-inference.asciidoc[] +include::cohere-es.asciidoc[] diff --git a/docs/reference/searchable-snapshots/index.asciidoc b/docs/reference/searchable-snapshots/index.asciidoc index 4a56961246c2b..794496c8b24ad 100644 --- a/docs/reference/searchable-snapshots/index.asciidoc +++ b/docs/reference/searchable-snapshots/index.asciidoc @@ -310,9 +310,9 @@ of {search-snap} indices. The sole copy of the data in a {search-snap} index is the underlying snapshot, stored in the repository. For example: -* You cannot unregister a repository while any of the searchable snapshots it -contains are mounted in {es}. You also cannot delete a snapshot if any of its -indices are mounted as a searchable snapshot in the same cluster. +* You must not unregister a repository while any of the searchable snapshots it +contains are mounted in {es}. You also must not delete a snapshot if any of its +indices are mounted as searchable snapshots.
* If you mount indices from snapshots held in a repository to which a different cluster has write access then you must make sure that the other cluster does not diff --git a/docs/reference/security/authorization/privileges.asciidoc b/docs/reference/security/authorization/privileges.asciidoc index 9153b5fbdcab3..be30db4d100bd 100644 --- a/docs/reference/security/authorization/privileges.asciidoc +++ b/docs/reference/security/authorization/privileges.asciidoc @@ -85,6 +85,9 @@ All {Ilm} operations related to managing policies. `manage_index_templates`:: All operations on index templates. +`manage_inference`:: +All operations related to managing {infer}. + `manage_ingest_pipelines`:: All operations on ingest pipelines. @@ -192,6 +195,9 @@ node info, node and cluster stats, and pending cluster tasks. `monitor_enrich`:: All read-only operations related to managing and executing enrich policies. +`monitor_inference`:: +All read-only operations related to {infer}. + `monitor_ml`:: All read-only {ml} operations, such as getting information about {dfeeds}, jobs, model snapshots, or results. diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 47403df450bd2..93edc0918614d 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -1,7 +1,7 @@ // tag::elser[] Hybrid search between a semantic and lexical query can be achieved by using an -< as part of your search request. Provide a +<> as part of your search request. Provide a `text_expansion` query and a full-text query as <> for the `rrf` retriever. The `rrf` retriever uses <> to rank the top documents. diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index 01ef39b69c529..ceff8619062c4 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -138,3 +138,5 @@ include::troubleshooting/troubleshooting-searches.asciidoc[] include::troubleshooting/troubleshooting-shards-capacity.asciidoc[] include::troubleshooting/troubleshooting-unbalanced-cluster.asciidoc[] + +include::troubleshooting/diagnostic.asciidoc[] diff --git a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc new file mode 100644 index 0000000000000..a944ca88d285d --- /dev/null +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -0,0 +1,152 @@ +[[diagnostic]] +== Capturing diagnostics +++++ +Capture diagnostics +++++ +:keywords: Elasticsearch diagnostic, diagnostics + +The {es} https://github.com/elastic/support-diagnostics[Support Diagnostic] tool captures a point-in-time snapshot of cluster statistics and most settings. +It works against all {es} versions. + +This information can be used to troubleshoot problems with your cluster. For examples of issues that you can troubleshoot using Support Diagnostic tool output, refer to https://www.elastic.co/blog/why-does-elastic-support-keep-asking-for-diagnostic-files[the Elastic blog]. + +You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or +https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. 
+ +[discrete] +[[diagnostic-tool-requirements]] +=== Requirements + +- Java Runtime Environment or Java Development Kit v1.8 or higher + +[discrete] +[[diagnostic-tool-access]] +=== Access the tool + +The Support Diagnostic tool is included as a sub-library in some Elastic deployments: + +* {ece}: Located under **{ece}** > **Deployment** > **Operations** > +**Prepare Bundle** > **{es}**. +* {eck}: Run as https://www.elastic.co/guide/en/cloud-on-k8s/current/k8s-take-eck-dump.html[`eck-diagnostics`]. + +You can also directly download the `diagnostics-X.X.X-dist.zip` file for the latest Support Diagnostic release +from https://github.com/elastic/support-diagnostics/releases/latest[the `support-diagnostics` repo]. + + +[discrete] +[[diagnostic-capture]] +=== Capture diagnostic information + +To capture an {es} diagnostic: + +. In a terminal, verify that your network and user permissions are sufficient to connect to your {es} +cluster by polling the cluster's <>. ++ +For example, with the parameters `host:localhost`, `port:9200`, and `username:elastic`, you'd use the following curl request: ++ +[source,sh] +---- +curl -X GET -k -u elastic -p https://localhost:9200/_cluster/health +---- +// NOTCONSOLE ++ +If you receive an HTTP 200 `OK` response, then you can proceed to the next step. If you receive a different +response code, then <> before proceeding. + +. Using the same environment parameters, run the diagnostic tool script. ++ +For information about the parameters that you can pass to the tool, refer to the https://github.com/elastic/support-diagnostics#standard-options[diagnostic +parameter reference]. ++ +The following command options are recommended: ++ +**Unix-based systems** ++ +[source,sh] +---- +sudo ./diagnostics.sh --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +**Windows** ++ +[source,sh] +---- +.\diagnostics.bat --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +[TIP] +.Script execution modes +==== +You can execute the script in three https://github.com/elastic/support-diagnostics#diagnostic-types[modes]: + +* `local` (default, recommended): Polls the <>, +gathers operating system info, and captures cluster and GC logs. + +* `remote`: Establishes an SSH session +to the applicable target server to pull the same information as `local`. + +* `api`: Polls the <>. All other data must be +collected manually. +==== + +. When the script has completed, verify that no errors were logged to `diagnostic.log`. +If the log file contains errors, then refer to <>. + +. If the script completed without errors, then an archive with the format `-diagnostics-.zip` is created in the working directory, or an output directory you have specified. You can review or share the diagnostic archive as needed. + +[discrete] +[[diagnostic-non-200]] +=== Diagnose a non-200 cluster health response + +When you poll your cluster health, if you receive any response other than `200 OK`, then the diagnostic tool +might not work as intended. The following are possible error codes and their resolutions: + +HTTP 401 `UNAUTHENTICATED`:: +Additional information in the error will usually indicate either +that your `username:password` pair is invalid, or that your `.security` +index is unavailable and you need to set up a temporary +<> user with `role:superuser` to authenticate. + +HTTP 403 `UNAUTHORIZED`:: +Your `username` is recognized but +has insufficient permissions to run the diagnostic.
+ +[discrete] +[[diagnostic-non-200]] +=== Diagnose a non-200 cluster health response + +When you poll your cluster health, if you receive any response other than `200 OK`, then the diagnostic tool +might not work as intended. The following are possible error codes and their resolutions: + +HTTP 401 `UNAUTHENTICATED`:: +Additional information in the error will usually indicate either +that your `username:password` pair is invalid, or that your `.security` +index is unavailable and you need to set up a temporary +<> user with `role:superuser` to authenticate. + +HTTP 403 `UNAUTHORIZED`:: +Your `username` is recognized but +has insufficient permissions to run the diagnostic. Either use a different +username or elevate the user's privileges. + +HTTP 429 `TOO_MANY_REQUESTS` (for example, `circuit_breaking_exception`):: +Your username was authenticated and authorized, but the cluster is under +sufficiently high strain that it's not responding to API calls. These +responses are usually intermittent. You can proceed with running the diagnostic, +but the diagnostic results might be incomplete. + +HTTP 504 `GATEWAY_TIMEOUT`:: +Your network is experiencing issues reaching the cluster. You might be using a proxy or firewall. +Consider running the diagnostic tool from a different location, confirming your port, or using an IP +instead of a URL domain. + +HTTP 503 `SERVICE_UNAVAILABLE` (for example, `master_not_discovered_exception`):: +Your cluster does not currently have an elected master node, which is +required for it to be API-responsive. This might be temporary while the master +node rotates. If the issue persists, then <> +before proceeding. + +[discrete] +[[diagnostic-log-errors]] +=== Diagnose errors in `diagnostic.log` + +The following are common errors that you might encounter when running the diagnostic tool: + +* `Error: Could not find or load main class com.elastic.support.diagnostics.DiagnosticApp` ++ +This indicates that you accidentally downloaded the source code file +instead of `diagnostics-X.X.X-dist.zip` from the releases page. + +* `Could not retrieve the Elasticsearch version due to a system or network error - unable to continue.` ++ +This indicates that the diagnostic couldn't run commands against the cluster. +Poll the cluster's health again, and ensure that you're using the same parameters +when you run the diagnostic batch or shell file. + +* A `security_exception` that includes `is unauthorized for user`: ++ +The provided user has insufficient admin permissions to run the diagnostic tool. Use another +user, or grant the user `role:superuser` privileges. \ No newline at end of file diff --git a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java index 68a4a136c5308..83a68c984a684 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java +++ b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java @@ -46,4 +46,30 @@ public String toString() { }; } + + /** + * Returns an empty iterator. + */ + static <T> ReleasableIterator<T> empty() { + return new ReleasableIterator<>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public T next() { + assert false : "hasNext is always false so next should never be called"; + return null; + } + + @Override + public void close() {} + + @Override + public String toString() { + return "ReleasableIterator[]"; + } + }; + } }
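Editor's note on the new `empty()` factory above: it gives call sites a releasable cursor even when there is nothing to iterate, so try-with-resources can be used unconditionally instead of null checks. A minimal usage sketch, assuming the `org.elasticsearch.core` library on the classpath and the pre-existing `single(...)` companion factory on this interface; `firstPage` is a hypothetical helper, not part of the change:

[source,java]
----
import java.util.List;
import org.elasticsearch.core.ReleasableIterator;

class EmptyIteratorExample {
    // Returns a cursor over the first page if present, otherwise an empty cursor
    // rather than null, so callers never need a null check.
    static ReleasableIterator<String> firstPage(List<String> pages) {
        return pages.isEmpty() ? ReleasableIterator.empty() : ReleasableIterator.single(pages.get(0));
    }

    public static void main(String[] args) {
        try (ReleasableIterator<String> it = firstPage(List.of())) {
            while (it.hasNext()) { // always false for empty()
                System.out.println(it.next());
            }
        } // close() is a no-op for the empty iterator
    }
}
----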
diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 168eb533fea74..7a545787bbdae 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -18,7 +18,7 @@ configurations { } var zstdVersion = "1.5.5" -var vecVersion = "1.0.6" +var vecVersion = "1.0.8" repositories { exclusiveContent { diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java index 56017d3a8a20a..c390cfc9289c6 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java @@ -45,7 +45,15 @@ public Optional<VectorSimilarityFunctions> getVectorSimilarityFunctions() { } static boolean isNativeVectorLibSupported() { - return Runtime.version().feature() >= 21 && isMacOrLinuxAarch64() && checkEnableSystemProperty(); + return Runtime.version().feature() >= 21 && (isMacOrLinuxAarch64() || isLinuxAmd64()) && checkEnableSystemProperty(); + } + + /** + * Returns true iff the architecture is x64 (amd64) and the OS is Linux (the OS we currently support for the native lib). + */ + static boolean isLinuxAmd64() { + String name = System.getProperty("os.name"); + return (name.startsWith("Linux")) && System.getProperty("os.arch").equals("amd64"); } /** Returns true iff the OS is Mac or Linux, and the architecture is aarch64. */ diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 5313984ac6d61..0af87154960ad 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -17,7 +17,10 @@ import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_INT; @@ -26,31 +29,49 @@ class JdkSystemdLibrary implements SystemdLibrary { static { - System.load(findLibSystemd()); - } - - // On some systems libsystemd does not have a non-versioned symlink. System.loadLibrary only knows how to find - // non-versioned library files. So we must manually check the library path to find what we need. - static String findLibSystemd() { - final String libsystemd = "libsystemd.so.0"; - String libpath = System.getProperty("java.library.path"); - for (String basepathStr : libpath.split(":")) { - var basepath = Paths.get(basepathStr); - if (Files.exists(basepath) == false) { - continue; + // Find and load libsystemd. We attempt all instances of + // libsystemd in case of multiarch systems, and stop when + // one is successfully loaded. If none can be loaded, + // UnsatisfiedLinkError will be thrown.
+ List paths = findLibSystemd(); + if (paths.isEmpty()) { + String libpath = System.getProperty("java.library.path"); + throw new UnsatisfiedLinkError("Could not find libsystemd in java.library.path: " + libpath); + } + UnsatisfiedLinkError last = null; + for (String path : paths) { + try { + System.load(path); + last = null; + break; + } catch (UnsatisfiedLinkError e) { + last = e; } - try (var stream = Files.walk(basepath)) { + } + if (last != null) { + throw last; + } + } - var foundpath = stream.filter(Files::isDirectory).map(p -> p.resolve(libsystemd)).filter(Files::exists).findAny(); - if (foundpath.isPresent()) { - return foundpath.get().toAbsolutePath().toString(); - } + // findLibSystemd returns a list of paths to instances of libsystemd + // found within java.library.path. + static List findLibSystemd() { + // Note: on some systems libsystemd does not have a non-versioned symlink. + // System.loadLibrary only knows how to find non-versioned library files, + // so we must manually check the library path to find what we need. + final Path libsystemd = Paths.get("libsystemd.so.0"); + final String libpath = System.getProperty("java.library.path"); + return Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).flatMap(p -> { + try { + return Files.find( + p, + Integer.MAX_VALUE, + (fp, attrs) -> (attrs.isDirectory() == false && fp.getFileName().equals(libsystemd)) + ); } catch (IOException e) { throw new UncheckedIOException(e); } - - } - throw new UnsatisfiedLinkError("Could not find " + libsystemd + " in java.library.path: " + libpath); + }).map(p -> p.toAbsolutePath().toString()).toList(); } private static final MethodHandle sd_notify$mh = downcallHandle("sd_notify", FunctionDescriptor.of(JAVA_INT, JAVA_INT, ADDRESS)); diff --git a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java index adf32874c04f1..8c4cbb688abcd 100644 --- a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java +++ b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java @@ -37,7 +37,9 @@ public boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && ((arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) + || (arch.equals("amd64") && osName.equals("Linux")))) { assertThat(vectorSimilarityFunctions, isPresent()); return true; } else { diff --git a/libs/vec/native/Dockerfile b/libs/vec/native/Dockerfile index 25dcf4d4854d0..66eb7e92ef479 100644 --- a/libs/vec/native/Dockerfile +++ b/libs/vec/native/Dockerfile @@ -4,6 +4,7 @@ RUN apt update RUN apt install -y gcc g++ openjdk-17-jdk COPY . 
/workspace WORKDIR /workspace -RUN ./gradlew --quiet --console=plain clean vecSharedLibrary +RUN ./gradlew --quiet --console=plain clean buildSharedLibrary +RUN strip --strip-unneeded build/output/libvec.so -CMD cat build/libs/vec/shared/libvec.so +CMD cat build/output/libvec.so diff --git a/libs/vec/native/build.gradle b/libs/vec/native/build.gradle index 6a658da0644b7..7edf46d406862 100644 --- a/libs/vec/native/build.gradle +++ b/libs/vec/native/build.gradle @@ -12,9 +12,10 @@ var os = org.gradle.internal.os.OperatingSystem.current() // To update this library run publish_vec_binaries.sh ( or ./gradlew vecSharedLibrary ) // Or // For local development, build the docker image with: -// docker build --platform linux/arm64 --progress=plain . +// docker build --platform linux/arm64 --progress=plain . (for aarch64) +// docker build --platform linux/amd64 --progress=plain . (for x64) // Grab the image id from the console output, then, e.g. -// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/libvec.so +// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/$arch/libvec.so // // To run tests and benchmarks on a locally built libvec, // 1. Temporarily comment out the download in libs/native/library/build.gradle @@ -30,26 +31,83 @@ var os = org.gradle.internal.os.OperatingSystem.current() group = 'org.elasticsearch' +def platformName = System.getProperty("os.arch"); + model { + platforms { + aarch64 { + architecture "aarch64" + } + amd64 { + architecture "x86-64" + } + } toolChains { gcc(Gcc) { target("aarch64") { cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=armv8-a"]) } + } + target("amd64") { + cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2", "-Wno-incompatible-pointer-types"]) } } } - clang(Clang) - } - platforms { - aarch64 { - architecture "aarch64" + cl(VisualCpp) { + eachPlatform { toolchain -> + def platform = toolchain.getPlatform() + if (platform.name == "x64") { + cCompiler.withArguments { args -> args.addAll(["/O2", "/LD", "-march=core-avx2"]) } + } + } + } + clang(Clang) { + target("amd64") { + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2"]) } + } } } components { vec(NativeLibrarySpec) { targetPlatform "aarch64" - binaries.withType(SharedLibraryBinarySpec) { - cCompiler.args "-O3", "-std=c99", "-march=armv8-a" + targetPlatform "amd64" + + sources { + c { + source { + srcDir "src/vec/c/${platformName}/" + include "*.c" + } + exportedHeaders { + srcDir "src/vec/headers/" + } + } + } + } + } +} + +tasks.register('buildSharedLibrary') { + description = 'Assembles native shared library for the host architecture' + if (platformName.equals("aarch64")) { + dependsOn tasks.vecAarch64SharedLibrary + doLast { + copy { + from tasks.linkVecAarch64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' + } + } + } else if (platformName.equals("amd64")) { + dependsOn tasks.vecAmd64SharedLibrary + doLast { + copy { + from tasks.linkVecAmd64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' } } + } else { + throw new GradleException("Unsupported platform: " + platformName) } } diff --git a/libs/vec/native/publish_vec_binaries.sh b/libs/vec/native/publish_vec_binaries.sh index e17690160e253..2ed6c750ab9e8 
100755 --- a/libs/vec/native/publish_vec_binaries.sh +++ b/libs/vec/native/publish_vec_binaries.sh @@ -19,7 +19,7 @@ if [ -z "$ARTIFACTORY_API_KEY" ]; then exit 1; fi -VERSION="1.0.6" +VERSION="1.0.8" ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" TEMP=$(mktemp -d) @@ -29,16 +29,22 @@ if curl -sS -I --fail --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/ve fi echo 'Building Darwin binary...' -./gradlew --quiet --console=plain vecSharedLibrary +./gradlew --quiet --console=plain vecAarch64SharedLibrary echo 'Building Linux binary...' DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet .) -docker run $DOCKER_IMAGE > build/libs/vec/shared/libvec.so +docker run $DOCKER_IMAGE > build/libs/vec/shared/aarch64/libvec.so + +echo 'Building Linux x64 binary...' +DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet .) +docker run --platform linux/amd64 $DOCKER_IMAGE > build/libs/vec/shared/amd64/libvec.so mkdir -p $TEMP/darwin-aarch64 mkdir -p $TEMP/linux-aarch64 -cp build/libs/vec/shared/libvec.dylib $TEMP/darwin-aarch64/ -cp build/libs/vec/shared/libvec.so $TEMP/linux-aarch64/ +mkdir -p $TEMP/linux-x64 +cp build/libs/vec/shared/aarch64/libvec.dylib $TEMP/darwin-aarch64/ +cp build/libs/vec/shared/aarch64/libvec.so $TEMP/linux-aarch64/ +cp build/libs/vec/shared/amd64/libvec.so $TEMP/linux-x64/ echo 'Uploading to Artifactory...' (cd $TEMP && zip -rq - .) | curl -sS -X PUT -H "X-JFrog-Art-Api: ${ARTIFACTORY_API_KEY}" --data-binary @- --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/vec/${VERSION}/vec-${VERSION}.zip" diff --git a/libs/vec/native/src/vec/c/vec.c b/libs/vec/native/src/vec/c/aarch64/vec.c similarity index 99% rename from libs/vec/native/src/vec/c/vec.c rename to libs/vec/native/src/vec/c/aarch64/vec.c index 05dfe64a3be9b..478e5e84d3859 100644 --- a/libs/vec/native/src/vec/c/vec.c +++ b/libs/vec/native/src/vec/c/aarch64/vec.c @@ -121,7 +121,7 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { int32_t res = 0; int i = 0; - if (i > SQR7U_STRIDE_BYTES_LEN) { + if (dims > SQR7U_STRIDE_BYTES_LEN) { i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); res = sqr7u_inner(a, b, i); } diff --git a/libs/vec/native/src/vec/c/amd64/vec.c b/libs/vec/native/src/vec/c/amd64/vec.c new file mode 100644 index 0000000000000..c9a49ad2d1d4d --- /dev/null +++ b/libs/vec/native/src/vec/c/amd64/vec.c @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +#include <stddef.h> +#include <stdint.h> +#include "vec.h" + +#include <emmintrin.h> +#include <immintrin.h> + +#ifndef DOT7U_STRIDE_BYTES_LEN +#define DOT7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifndef SQR7U_STRIDE_BYTES_LEN +#define SQR7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifdef _MSC_VER +#include <intrin.h> +#elif __GNUC__ +#include <x86intrin.h> +#elif __clang__ +#include <x86intrin.h> +#endif + +// Multi-platform CPUID "intrinsic"; it takes as input a "functionNumber" (or "leaf", the eax register). "Subleaf"
Output is stored in the passed output parameter: output[0] = eax, output[1] = ebx, output[2] = ecx, +// output[3] = edx +static inline void cpuid(int output[4], int functionNumber) { +#if defined(__GNUC__) || defined(__clang__) + // use inline assembly, Gnu/AT&T syntax + int a, b, c, d; + __asm("cpuid" : "=a"(a), "=b"(b), "=c"(c), "=d"(d) : "a"(functionNumber), "c"(0) : ); + output[0] = a; + output[1] = b; + output[2] = c; + output[3] = d; + +#elif defined (_MSC_VER) + __cpuidex(output, functionNumber, 0); +#else + #error Unsupported compiler +#endif +} + +// Utility function to horizontally add 8 32-bit integers +static inline int hsum_i32_8(const __m256i a) { + const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); + const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); + const __m128i sum64 = _mm_add_epi32(hi64, sum128); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +EXPORT int vec_caps() { + int cpuInfo[4] = {-1}; + // Calling __cpuid with 0x0 as the function_id argument + // gets the number of the highest valid function ID. + cpuid(cpuInfo, 0); + int functionIds = cpuInfo[0]; + if (functionIds >= 7) { + cpuid(cpuInfo, 7); + int ebx = cpuInfo[1]; + // AVX2 flag is the 5th bit + // We assume that all processors that have AVX2 also have FMA3 + return (ebx & (1 << 5)) != 0; + } + return 0; +} + +static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { + const __m256i ones = _mm256_set1_epi16(1); + + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += DOT7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + // Perform multiplication and create 16-bit values + // Vertically multiply each unsigned 8-bit integer from va with the corresponding + // 8-bit integer from vb, producing intermediate signed 16-bit integers. + const __m256i vab = _mm256_maddubs_epi16(va1, vb1); + // Horizontally add adjacent pairs of intermediate signed 16-bit integers, and pack the results. 
+static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { + const __m256i ones = _mm256_set1_epi16(1); + + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += DOT7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + // Perform multiplication and create 16-bit values + // Vertically multiply each unsigned 8-bit integer from va with the corresponding + // 8-bit integer from vb, producing intermediate signed 16-bit integers. + const __m256i vab = _mm256_maddubs_epi16(va1, vb1); + // Horizontally add adjacent pairs of intermediate signed 16-bit integers, and pack the results. + acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, vab), acc1); + } + + // reduce (horizontally add all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t dot7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > DOT7U_STRIDE_BYTES_LEN) { + i += dims & ~(DOT7U_STRIDE_BYTES_LEN - 1); + res = dot7u_inner(a, b, i); + } + for (; i < dims; i++) { + res += a[i] * b[i]; + } + return res; +} + +static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + + const __m256i ones = _mm256_set1_epi16(1); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += SQR7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + const __m256i dist1 = _mm256_sub_epi8(va1, vb1); + const __m256i abs_dist1 = _mm256_sign_epi8(dist1, dist1); + const __m256i sqr1 = _mm256_maddubs_epi16(abs_dist1, abs_dist1); + + acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, sqr1), acc1); + } + + // reduce (accumulate all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > SQR7U_STRIDE_BYTES_LEN) { + i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); + res = sqr7u_inner(a, b, i); + } + for (; i < dims; i++) { + int32_t dist = a[i] - b[i]; + res += dist * dist; + } + return res; +} +
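Editor's note on the tail handling in `dot7u`/`sqr7u` above: `dims & ~(STRIDE - 1)` rounds `dims` down to a multiple of the 32-byte stride, so the vectorized inner routine covers the aligned prefix and the scalar loop mops up the remainder. It also shows why the old `if (i > SQR7U_STRIDE_BYTES_LEN)` test fixed in the aarch64 hunk was a bug: `i` is still 0 at that point, so the vectorized path never ran. A worked sketch of the arithmetic in Java:

[source,java]
----
class StrideMaskExample {
    static final int STRIDE = 32; // must be a power of two, as in vec.c

    public static void main(String[] args) {
        int dims = 100;
        // Round down to the largest multiple of STRIDE that is <= dims.
        int aligned = dims & ~(STRIDE - 1); // 100 & ~31 == 96
        System.out.println("vector loop handles [0, " + aligned + ")");
        System.out.println("scalar tail handles [" + aligned + ", " + dims + ")");
        // With the old condition `if (i > STRIDE)` where i == 0, the inner
        // routine was never invoked and everything fell to the scalar tail.
    }
}
----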
diff --git a/libs/vec/native/src/vec/headers/vec.h b/libs/vec/native/src/vec/headers/vec.h index 5d3806dfccbe6..49fa29ec6fae9 100644 --- a/libs/vec/native/src/vec/headers/vec.h +++ b/libs/vec/native/src/vec/headers/vec.h @@ -7,7 +7,7 @@ */ #ifdef _MSC_VER -#define EXPORT extern "C" __declspec(dllexport) +#define EXPORT __declspec(dllexport) #elif defined(__GNUC__) && !defined(__clang__) #define EXPORT __attribute__((externally_visible,visibility("default"))) #elif __clang__ diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java index 2e60079da8649..2be0aa53f7c57 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java @@ -78,7 +78,7 @@ protected final void checkOrdinal(int ord) { } } - protected final float fallbackScore(int firstByteOffset, int secondByteOffset) throws IOException { + protected final float fallbackScore(long firstByteOffset, long secondByteOffset) throws IOException { input.seek(firstByteOffset); byte[] a = new byte[dims]; input.readBytes(a, 0, a.length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java index f92bf0b52ed07..5231bb8e3c67f 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); input.seek(firstByteOffset + length); @@ -47,10 +47,11 @@ public float score(int firstOrd, int secondOrd) throws IOException { if (firstSeg != null && secondSeg != null) { int dotProduct = dotProduct7u(firstSeg, secondSeg, length); + assert dotProduct >= 0; float adjustedDistance = dotProduct * scoreCorrectionConstant + firstOffset + secondOffset; - return (1 + adjustedDistance) / 2; + return Math.max((1 + adjustedDistance) / 2, 0f); } else { - return fallbackScore(firstByteOffset, secondByteOffset); + return Math.max(fallbackScore(firstByteOffset, secondByteOffset), 0f); } } } diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java index e1f16c6909cf4..55b08a899bd7c 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); MemorySegment secondSeg = segmentSlice(secondByteOffset, length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java index bd6fc921f1832..5cdfc62bc9071 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); input.seek(firstByteOffset + length); diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java index 771f665fb4084..13f2d5a03ec76 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java @@ -39,7 +39,9 @@ public static boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && (arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux")) + || arch.equals("amd64") && osName.equals("Linux"))) { assertThat(factory, isPresent()); return true; } else {
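Editor's note on the `Math.max(..., 0f)` clamping in `Int7DotProduct` above: with int7 quantization the raw dot product is non-negative (hence the new assert), but the stored per-vector offsets can be negative, so the corrected score `(1 + dotProduct * scoreCorrectionConstant + firstOffset + secondOffset) / 2` can dip below zero, which Lucene's scoring contract forbids. A worked example in Java with the same numbers as the `testNonNegativeDotProduct` test added below (zero vectors, offsets of -5, correction constant 1):

[source,java]
----
class DotProductClampExample {
    public static void main(String[] args) {
        int dotProduct = 0;                    // zero vectors => raw dot product 0
        float scoreCorrectionConstant = 1f;    // as passed to the scorer in the test
        float firstOffset = -5f, secondOffset = -5f;
        float adjusted = dotProduct * scoreCorrectionConstant + firstOffset + secondOffset;
        float unclamped = (1 + adjusted) / 2;  // (1 - 10) / 2 == -4.5
        float score = Math.max(unclamped, 0f); // clamped to 0, a legal Lucene score
        System.out.println(unclamped + " -> " + score);
    }
}
----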
diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 115cf8e8cf9f8..07d30a887c683 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.vec; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -17,6 +19,8 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Objects; +import java.util.Random; import java.util.function.Function; import static org.elasticsearch.vec.VectorSimilarityType.COSINE; @@ -24,6 +28,7 @@ import static org.elasticsearch.vec.VectorSimilarityType.EUCLIDEAN; import static org.elasticsearch.vec.VectorSimilarityType.MAXIMUM_INNER_PRODUCT; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; // @com.carrotsearch.randomizedtesting.annotations.Repeat(iterations = 100) public class VectorScorerFactoryTests extends AbstractVectorTestCase { @@ -92,6 +97,51 @@ void testSimpleImpl(long maxChunkSize) throws IOException { } } + public void testNonNegativeDotProduct() throws IOException { + assumeTrue(notSupportedMsg(), supported()); + var factory = AbstractVectorTestCase.factory.get(); + + try (Directory dir = new MMapDirectory(createTempDir(getTestName()), MMapDirectory.DEFAULT_MAX_CHUNK_SIZE)) { + // keep vecs `0` so dot product is `0` + byte[] vec1 = new byte[32]; + byte[] vec2 = new byte[32]; + String fileName = getTestName() + "-32"; + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + var negativeOffset = floatToByteArray(-5f); + byte[] bytes = concat(vec1, negativeOffset, vec2, negativeOffset); + out.writeBytes(bytes, 0, bytes.length); + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + // dot product + float expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(DOT_PRODUCT, vec1, vec2, + // 1, -5, -5); + var scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, DOT_PRODUCT, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // max inner product + expected = luceneScore(MAXIMUM_INNER_PRODUCT, vec1, vec2, 1, -5, -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, MAXIMUM_INNER_PRODUCT, in).get(); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // cosine + expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(COSINE, vec1, vec2, 1, -5, + // -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, COSINE, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // euclidean + expected = luceneScore(EUCLIDEAN, vec1, vec2, 1, -5, -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, EUCLIDEAN, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + } + } + } + public void testRandom() throws IOException { assumeTrue(notSupportedMsg(),
supported()); testRandom(MMapDirectory.DEFAULT_MAX_CHUNK_SIZE, BYTE_ARRAY_RANDOM_INT7_FUNC); @@ -226,6 +276,67 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi } } + // Tests with a large amount of data (> 2GB), which ensures that data offsets do not overflow + @Nightly + public void testLarge() throws IOException { + var factory = AbstractVectorTestCase.factory.get(); + + try (Directory dir = new MMapDirectory(createTempDir(getTestName()))) { + final int dims = 8192; + final int size = 262144; + final float correction = randomFloat(); + + String fileName = getTestName() + "-" + dims; + logger.info("Testing " + fileName); + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + for (int i = 0; i < size; i++) { + var vec = vector(i, dims); + var off = (float) i; + out.writeBytes(vec, 0, vec.length); + out.writeInt(Float.floatToIntBits(off)); + } + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + for (int times = 0; times < TIMES; times++) { + int idx0 = randomIntBetween(0, size - 1); + int idx1 = size - 1; + float off0 = (float) idx0; + float off1 = (float) idx1; + // dot product + float expected = luceneScore(DOT_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + var scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, DOT_PRODUCT, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // max inner product + expected = luceneScore(MAXIMUM_INNER_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, MAXIMUM_INNER_PRODUCT, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // cosine + expected = luceneScore(COSINE, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, COSINE, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // euclidean + expected = luceneScore(EUCLIDEAN, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, EUCLIDEAN, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + } + } + } + } + + // creates the vector based on the given ordinal, which is reproducible given the ord and dims + static byte[] vector(int ord, int dims) { + var random = new Random(Objects.hash(ord, dims)); + byte[] ba = new byte[dims]; + for (int i = 0; i < dims; i++) { + ba[i] = (byte) RandomNumbers.randomIntBetween(random, MIN_INT7_VALUE, MAX_INT7_VALUE); + } + return ba; + } + static Function<Integer, byte[]> BYTE_ARRAY_RANDOM_INT7_FUNC = size -> { byte[] ba = new byte[size]; randomBytesBetween(ba, MIN_INT7_VALUE, MAX_INT7_VALUE);
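Editor's note on the int-to-long offset change that `testLarge` validates: with `dims = 8192` and `size = 262144`, the byte offset of the last vector is `262143 * (8192 + 4)`, roughly 2.15 billion, which overflows a 32-bit int; computing it in `long` is required. A worked check in Java:

[source,java]
----
class OffsetOverflowExample {
    public static void main(String[] args) {
        final int dims = 8192;   // from testLarge
        final int size = 262144; // number of vectors
        final int lastOrd = size - 1;
        int intOffset = lastOrd * (dims + Float.BYTES);          // overflows: wraps negative
        long longOffset = (long) lastOrd * (dims + Float.BYTES); // correct
        System.out.println("int  offset: " + intOffset);  // prints a negative number
        System.out.println("long offset: " + longOffset); // prints 2148524028
    }
}
----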
diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index cf4eaab763011..2b1a8e1c0e318 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1779,7 +1779,14 @@ public void testRemoveGhostReference() throws Exception { public ClusterState execute(ClusterState currentState) throws Exception { DataStream original = currentState.getMetadata().dataStreams().get(dataStreamName); DataStream broken = original.copy() - .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) + .setBackingIndices( + original.getBackingIndices() + .copy() + .setIndices( + List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1)) + ) + .build() + ) .build(); brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java index 1d8de6b9ac5f6..27cd5697fd0f7 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java @@ -58,7 +58,7 @@ public void setup() throws IOException { assertThat(dataStreams.size(), is(1)); Map<String, Object> dataStream = (Map<String, Object>) dataStreams.get(0); assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); - List<String> backingIndices = getBackingIndices(dataStream); + List<String> backingIndices = getIndices(dataStream); assertThat(backingIndices.size(), is(1)); List<String> failureStore = getFailureStore(dataStream); assertThat(failureStore.size(), is(1)); @@ -199,18 +199,16 @@ public void testPutIndexMappingApi() throws IOException { } } - private List<String> getBackingIndices(Map<String, Object> response) { - return getIndices(response, "indices"); - } - + @SuppressWarnings("unchecked") private List<String> getFailureStore(Map<String, Object> response) { - return getIndices(response, "failure_indices"); + var failureStore = (Map<String, Object>) response.get("failure_store"); + return getIndices(failureStore); } @SuppressWarnings("unchecked") - private List<String> getIndices(Map<String, Object> response, String fieldName) { - List<Map<String, String>> indices = (List<Map<String, String>>) response.get(fieldName); + private List<String> getIndices(Map<String, Object> response) { + List<Map<String, String>> indices = (List<Map<String, String>>) response.get("indices"); return indices.stream().map(index -> index.get("index_name")).toList(); } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 721630d29b4c9..464a11ce8a062 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -37,7 +37,7 @@ public Set<NodeFeature> getFeatures() { DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE, // Added in 8.12 LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER, // Added in 8.13 DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE, - DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 + DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 ); } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index
88e529ec5569b..f5fa0db839230 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettingProvider; @@ -56,11 +57,11 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider { @Override public Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { if (dataStreamName != null) { @@ -70,13 +71,13 @@ public Settings getAdditionalIndexSettings( // so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES boolean migrating = dataStream != null && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) - && timeSeries; + && isTimeSeries; IndexMode indexMode; if (migrating) { indexMode = IndexMode.TIME_SERIES; } else if (dataStream != null) { - indexMode = timeSeries ? dataStream.getIndexMode() : null; - } else if (timeSeries) { + indexMode = isTimeSeries ? dataStream.getIndexMode() : null; + } else if (isTimeSeries) { indexMode = IndexMode.TIME_SERIES; } else { indexMode = null; @@ -84,8 +85,8 @@ public Settings getAdditionalIndexSettings( if (indexMode != null) { if (indexMode == IndexMode.TIME_SERIES) { Settings.Builder builder = Settings.builder(); - TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(allSettings); - TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(allSettings); + TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(indexTemplateAndCreateRequestSettings); + TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(indexTemplateAndCreateRequestSettings); final Instant start; final Instant end; if (dataStream == null || migrating) { @@ -114,9 +115,13 @@ public Settings getAdditionalIndexSettings( builder.put(IndexSettings.TIME_SERIES_START_TIME.getKey(), FORMATTER.format(start)); builder.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), FORMATTER.format(end)); - if (allSettings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey()) == false + if (indexTemplateAndCreateRequestSettings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey()) == false && combinedTemplateMappings.isEmpty() == false) { - List routingPaths = findRoutingPaths(indexName, allSettings, combinedTemplateMappings); + List routingPaths = findRoutingPaths( + indexName, + indexTemplateAndCreateRequestSettings, + combinedTemplateMappings + ); if (routingPaths.isEmpty() == false) { builder.putList(INDEX_ROUTING_PATH.getKey(), routingPaths); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index c3e8331b856fd..a614a2dc40e25 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -155,7 +155,7 @@ static ClusterState removeDataStream( DataStream dataStream = currentState.metadata().dataStreams().get(dataStreamName); assert dataStream != null; backingIndicesToRemove.addAll(dataStream.getIndices()); - backingIndicesToRemove.addAll(dataStream.getFailureIndices()); + backingIndicesToRemove.addAll(dataStream.getFailureIndices().getIndices()); } // first delete the data streams and then the indices: diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index f7064eb39a015..8017b1c72f862 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -145,8 +145,8 @@ static GetDataStreamAction.Response innerOperation( Map backingIndicesSettingsValues = new HashMap<>(); Metadata metadata = state.getMetadata(); collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getIndices()); - if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().isEmpty() == false) { - collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getFailureIndices()); + if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().getIndices().isEmpty() == false) { + collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getFailureIndices().getIndices()); } GetDataStreamAction.Response.TimeSeries timeSeries = null; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java index a6060923bd396..e3cdd6a8c14d9 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java @@ -64,7 +64,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public boolean dryRun() { return dryRun; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java index 3fe9ae0758a91..3bd100a106dd6 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java @@ -48,6 +48,7 @@ public void writeTo(StreamOutput out) throws IOException { } public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java index 51eb9e7e7e944..5816823ed710a 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java @@ -47,7 +47,9 @@ private GetDataStreamGlobalRetentionAction() {/* no instances */} public static final class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java index a30af402a9186..cc61c7fe664be 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java @@ -43,7 +43,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java index 2aa5b4b4d3acd..65ca34a99da23 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java @@ -108,6 +108,7 @@ public void writeTo(StreamOutput out) throws IOException { } public Request(@Nullable TimeValue defaultRetention, @Nullable TimeValue maxRetention) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.globalRetention = new DataStreamGlobalRetention(defaultRetention, maxRetention); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index 66133e9fbe0f2..4b0eaa6c46baf 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -139,7 +139,9 @@ public void testUpdateTimeSeriesTemporalRange_NoUpdateBecauseReplicated() { List.of(new Tuple<>(start.minus(4, ChronoUnit.HOURS), start), new Tuple<>(start, end)) ).getMetadata(); DataStream d = metadata.dataStreams().get(dataStreamName); - metadata = Metadata.builder(metadata).put(d.copy().setReplicated(true).setRolloverOnWrite(false).build()).build(); + metadata = Metadata.builder(metadata) + .put(d.copy().setReplicated(true).setBackingIndices(d.getBackingIndices().copy().setRolloverOnWrite(false).build()).build()) + .build(); now = now.plus(1, ChronoUnit.HOURS); ClusterState in = 
ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index ec6e624794a03..4059127b5eb85 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -82,7 +82,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti .setIndexMode(IndexMode.STANDARD) .setLifecycle(new DataStreamLifecycle()) .setFailureStoreEnabled(true) - .setFailureIndices(failureStores) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); String ilmPolicyName = "rollover-30days"; @@ -159,9 +159,8 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti ); if (DataStream.isFailureStoreFeatureFlagEnabled()) { - List failureStoresRepresentation = (List) dataStreamMap.get( - DataStream.FAILURE_INDICES_FIELD.getPreferredName() - ); + var failureStore = (Map) dataStreamMap.get(DataStream.FAILURE_STORE_FIELD.getPreferredName()); + List failureStoresRepresentation = (List) failureStore.get(DataStream.INDICES_FIELD.getPreferredName()); Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); @@ -185,7 +184,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti .setIndexMode(IndexMode.STANDARD) .setLifecycle(new DataStreamLifecycle(null, null, false)) .setFailureStoreEnabled(true) - .setFailureIndices(failureStores) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); String ilmPolicyName = "rollover-30days"; @@ -251,9 +250,8 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti ); if (DataStream.isFailureStoreFeatureFlagEnabled()) { - List failureStoresRepresentation = (List) dataStreamMap.get( - DataStream.FAILURE_INDICES_FIELD.getPreferredName() - ); + var failureStore = (Map) dataStreamMap.get(DataStream.FAILURE_STORE_FIELD.getPreferredName()); + List failureStoresRepresentation = (List) failureStore.get(DataStream.INDICES_FIELD.getPreferredName()); Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java index b9dc6d349873c..41d00d063955d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java @@ -261,7 +261,7 @@ private static DataStream newDataStreamInstance( .setReplicated(replicated) 
.setLifecycle(lifecycle) .setFailureStoreEnabled(failureStores.isEmpty() == false) - .setFailureIndices(failureStores); + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()); if (randomBoolean()) { builder.setSystem(true); builder.setHidden(true); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index a7ec537823827..20485402b07ae 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -210,8 +210,8 @@ setup: --- "Create data stream with failure store": - requires: - cluster_features: ["gte_v8.11.0"] - reason: "data stream failure stores only creatable in 8.11+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" - do: allowed_warnings: @@ -248,9 +248,9 @@ setup: - match: { data_streams.0.status: 'GREEN' } - match: { data_streams.0.template: 'my-template4' } - match: { data_streams.0.hidden: false } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/'} + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/'} - match: { data_streams.1.name: failure-data-stream2 } - match: { data_streams.1.timestamp_field.name: '@timestamp' } @@ -259,15 +259,15 @@ setup: - match: { data_streams.1.indices.0.index_name: '/\.ds-failure-data-stream2-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - match: { data_streams.1.template: 'my-template4' } - match: { data_streams.1.hidden: false } - - match: { data_streams.1.failure_store: true } - - length: { data_streams.1.failure_indices: 1 } - - match: { data_streams.1.failure_indices.0.index_name: '/\.fs-failure-data-stream2-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.1.failure_store.enabled: true } + - length: { data_streams.1.failure_store.indices: 1 } + - match: { data_streams.1.failure_store.indices.0.index_name: '/\.fs-failure-data-stream2-(\d{4}\.\d{2}\.\d{2}-)?000001/' } # save the backing index names for later use - set: { data_streams.0.indices.0.index_name: idx0name } - - set: { data_streams.0.failure_indices.0.index_name: fsidx0name } + - set: { data_streams.0.failure_store.indices.0.index_name: fsidx0name } - set: { data_streams.1.indices.0.index_name: idx1name } - - set: { data_streams.1.failure_indices.0.index_name: fsidx1name } + - set: { data_streams.1.failure_store.indices.0.index_name: fsidx1name } - do: indices.get_mapping: @@ -538,8 +538,8 @@ setup: --- "Delete data stream with failure stores": - requires: - cluster_features: ["gte_v8.12.0"] - reason: "data stream failure stores only supported in 8.12+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" - do: allowed_warnings: @@ -570,7 +570,7 @@ setup: name: failure-data-stream1 - set: { data_streams.0.indices.0.index_name: idx0name } - - set: { data_streams.0.failure_indices.0.index_name: fs0name } + - set: { data_streams.0.failure_store.indices.0.index_name: fs0name } - do: indices.get: @@ -586,8 +586,8 @@ 
setup: - match: { data_streams.0.generation: 1 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: indices.delete_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml index 8c0e27373664d..a3baa524259b8 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml @@ -92,8 +92,8 @@ --- "Modify a data stream's failure store": - requires: - cluster_features: [ "gte_v8.14.0" ] - reason: "this API was released in 8.14.0" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: [ "allowed_warnings" ] - do: @@ -128,14 +128,14 @@ indices.get_data_stream: name: data-stream-for-modification - set: { data_streams.0.indices.0.index_name: write_index } - - set: { data_streams.0.failure_indices.0.index_name: first_failure_index } - - set: { data_streams.0.failure_indices.1.index_name: write_failure_index } + - set: { data_streams.0.failure_store.indices.0.index_name: first_failure_index } + - set: { data_streams.0.failure_store.indices.1.index_name: write_failure_index } - do: indices.get_data_stream: name: data-stream-for-modification2 - set: { data_streams.0.indices.0.index_name: second_write_index } - - set: { data_streams.0.failure_indices.0.index_name: second_write_failure_index } + - set: { data_streams.0.failure_store.indices.0.index_name: second_write_failure_index } - do: index: @@ -170,11 +170,11 @@ - match: { data_streams.0.timestamp_field.name: '@timestamp' } - match: { data_streams.0.generation: 3 } - length: { data_streams.0.indices: 1 } - - length: { data_streams.0.failure_indices: 3 } + - length: { data_streams.0.failure_store.indices: 3 } - match: { data_streams.0.indices.0.index_name: $write_index } - - match: { data_streams.0.failure_indices.0.index_name: 'test_index1' } - - match: { data_streams.0.failure_indices.1.index_name: $first_failure_index } - - match: { data_streams.0.failure_indices.2.index_name: $write_failure_index } + - match: { data_streams.0.failure_store.indices.0.index_name: 'test_index1' } + - match: { data_streams.0.failure_store.indices.1.index_name: $first_failure_index } + - match: { data_streams.0.failure_store.indices.2.index_name: $write_failure_index } # An index that has an alias is not allowed to be added to failure store - do: @@ -269,10 +269,10 @@ - match: { data_streams.0.timestamp_field.name: '@timestamp' } - match: { data_streams.0.generation: 4 } - length: { data_streams.0.indices: 1 } - - length: { data_streams.0.failure_indices: 2 } + - length: { data_streams.0.failure_store.indices: 2 } - match: { data_streams.0.indices.0.index_name: $write_index } - - match: { data_streams.0.failure_indices.0.index_name: $first_failure_index } - - match: { 
data_streams.0.failure_indices.1.index_name: $write_failure_index } + - match: { data_streams.0.failure_store.indices.0.index_name: $first_failure_index } + - match: { data_streams.0.failure_store.indices.1.index_name: $write_failure_index } - do: indices.delete_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index 7268ee9bb3b56..9dce5150388d4 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -23,8 +23,8 @@ teardown: --- "Redirect ingest failure in data stream to failure store": - requires: - cluster_features: ["gte_v8.13.0"] - reason: "data stream failure stores only redirect ingest failures in 8.13+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: [allowed_warnings, contains] - do: @@ -74,9 +74,9 @@ teardown: - match: { data_streams.0.timestamp_field.name: '@timestamp' } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: search: @@ -152,9 +152,9 @@ teardown: - match: { data_streams.0.timestamp_field.name: '@timestamp' } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: search: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 0074ce425c6f9..91d23afa67af9 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -1,8 +1,8 @@ --- setup: - requires: - cluster_features: ["gte_v8.14.0"] - reason: "data stream failure store rollover only supported in 8.14+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: allowed_warnings - do: @@ -48,9 +48,9 @@ setup: - match: { data_streams.0.generation: 2 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: 
'/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 2 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - length: { data_streams.0.failure_store.indices: 2 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } --- "Roll over a data stream's failure store with conditions": @@ -86,9 +86,9 @@ setup: - match: { data_streams.0.generation: 2 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 2 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - length: { data_streams.0.failure_store.indices: 2 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } --- "Don't roll over a data stream's failure store when conditions aren't met": @@ -112,5 +112,5 @@ setup: - match: { data_streams.0.generation: 1 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml index 32338fea056ae..3ab22e6271c6d 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml @@ -50,8 +50,8 @@ --- "Put index template with failure store": - requires: - cluster_features: ["gte_v8.11.0"] - reason: "data stream failure stores only creatable in 8.11+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: allowed_warnings - do: @@ -91,9 +91,9 @@ - match: { data_streams.0.timestamp_field.name: '@timestamp' } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { 
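All of the YAML assertion changes above track a single REST change: the data stream GET response no longer returns a top-level `failure_store` boolean next to a `failure_indices` array, but nests both under one `failure_store` object with `enabled` and `indices` fields. A minimal sketch of the two response fragments, with a hypothetical backing index name (illustrative only, not part of the diff):

    // Old vs. new GET /_data_stream response fragment, as Java text blocks.
    String before815 = """
        {"failure_store": true,
         "failure_indices": [{"index_name": ".fs-logs-foobar-2024.05.10-000001"}]}""";
    String since815 = """
        {"failure_store": {"enabled": true,
                           "indices": [{"index_name": ".fs-logs-foobar-2024.05.10-000001"}]}}""";

This is also why each affected test's `requires` gate moves up to `gte_v8.15.0`: the new assertions only match the 8.15+ structure.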
data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: indices.delete_data_stream: diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index c04dffe82b3cf..0a423cb375e88 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -341,12 +342,15 @@ && hasAtLeastOneGeoipProcessor( ); } + @UpdateForV9 // use MINUS_ONE once that means no timeout + private static final TimeValue MASTER_TIMEOUT = TimeValue.MAX_VALUE; + private void startTask(Runnable onFailure) { persistentTasksService.sendStartRequest( GEOIP_DOWNLOADER, GEOIP_DOWNLOADER, new GeoIpTaskParams(), - null, + MASTER_TIMEOUT, ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { Throwable t = e instanceof RemoteTransportException ? ExceptionsHelper.unwrapCause(e) : e; if (t instanceof ResourceAlreadyExistsException == false) { @@ -368,7 +372,7 @@ private void stopTask(Runnable onFailure) { } } ); - persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, null, ActionListener.runAfter(listener, () -> { + persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, MASTER_TIMEOUT, ActionListener.runAfter(listener, () -> { IndexAbstraction databasesAbstraction = clusterService.state().metadata().getIndicesLookup().get(DATABASES_INDEX); if (databasesAbstraction != null) { // regardless of whether DATABASES_INDEX is an alias, resolve it to a concrete index diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 2e50cc0a97677..6898e44335793 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -483,22 +483,22 @@ private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabas } } case HOSTING_PROVIDER -> { - geoData.put("is_hosting_provider", isHostingProvider); + geoData.put("hosting_provider", isHostingProvider); } case TOR_EXIT_NODE -> { - geoData.put("is_tor_exit_node", isTorExitNode); + geoData.put("tor_exit_node", isTorExitNode); } case ANONYMOUS_VPN -> { - geoData.put("is_anonymous_vpn", isAnonymousVpn); + geoData.put("anonymous_vpn", isAnonymousVpn); } case ANONYMOUS -> { - geoData.put("is_anonymous", isAnonymous); + geoData.put("anonymous", isAnonymous); } case PUBLIC_PROXY -> { - geoData.put("is_public_proxy", isPublicProxy); + geoData.put("public_proxy", isPublicProxy); } case RESIDENTIAL_PROXY -> { - geoData.put("is_residential_proxy", isResidentialProxy); + geoData.put("residential_proxy", isResidentialProxy); } } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java 
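The GeoIpProcessor hunk above renames the enterprise database flags by dropping their `is_` prefix, so enriched documents now carry `hosting_provider`, `tor_exit_node`, `anonymous_vpn`, `anonymous`, `public_proxy`, and `residential_proxy`. A rough sketch of the resulting field map (values are hypothetical; only the field names come from the diff):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class GeoIpFieldNames {
        static Map<String, Object> enterpriseFlags() {
            Map<String, Object> geoData = new LinkedHashMap<>();
            geoData.put("hosting_provider", false);   // formerly "is_hosting_provider"
            geoData.put("tor_exit_node", false);      // formerly "is_tor_exit_node"
            geoData.put("anonymous_vpn", false);      // formerly "is_anonymous_vpn"
            geoData.put("anonymous", false);          // formerly "is_anonymous"
            geoData.put("public_proxy", false);       // formerly "is_public_proxy"
            geoData.put("residential_proxy", false);  // formerly "is_residential_proxy"
            return geoData;
        }
    }

The matching assertion updates in GeoIpProcessorTests follow in the next hunk.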
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index f9f79d54522da..ec77cacbdb6b6 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -373,12 +373,12 @@ public void testEnterprise() throws Exception { location.put("lon", -1.25); assertThat(geoData.get("location"), equalTo(location)); assertThat(geoData.get("network"), equalTo("2.125.160.216/29")); - assertThat(geoData.get("is_hosting_provider"), equalTo(false)); - assertThat(geoData.get("is_tor_exit_node"), equalTo(false)); - assertThat(geoData.get("is_anonymous_vpn"), equalTo(false)); - assertThat(geoData.get("is_anonymous"), equalTo(false)); - assertThat(geoData.get("is_public_proxy"), equalTo(false)); - assertThat(geoData.get("is_residential_proxy"), equalTo(false)); + assertThat(geoData.get("hosting_provider"), equalTo(false)); + assertThat(geoData.get("tor_exit_node"), equalTo(false)); + assertThat(geoData.get("anonymous_vpn"), equalTo(false)); + assertThat(geoData.get("anonymous"), equalTo(false)); + assertThat(geoData.get("public_proxy"), equalTo(false)); + assertThat(geoData.get("residential_proxy"), equalTo(false)); } public void testAddressIsNotInTheDatabase() throws Exception { diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index d0cef178dc920..aee0d313e4e00 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.ESNetty4IntegTestCase; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.MockLogAppender; @@ -24,16 +23,14 @@ public class ESLoggingHandlerIT extends ESNetty4IntegTestCase { private MockLogAppender appender; - private Releasable appenderRelease; public void setUp() throws Exception { super.setUp(); - appender = new MockLogAppender(); - appenderRelease = appender.capturing(ESLoggingHandler.class, TransportLogger.class, TcpTransport.class); + appender = MockLogAppender.capture(ESLoggingHandler.class, TransportLogger.class, TcpTransport.class); } public void tearDown() throws Exception { - appenderRelease.close(); + appender.close(); super.tearDown(); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java index 7ce962ff56b67..3035213766584 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -40,6 +40,7 @@ import org.elasticsearch.transport.netty4.NettyAllocator; import java.io.Closeable; +import java.io.IOException; import java.net.SocketAddress; import java.net.SocketException; import java.nio.charset.StandardCharsets; @@ -203,7 +204,11 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) { @Override public void exceptionCaught(ChannelHandlerContext 
ctx, Throwable cause) { - if (cause instanceof PrematureChannelClosureException || cause instanceof SocketException) { + if (cause instanceof PrematureChannelClosureException + || cause instanceof SocketException + || (cause instanceof IOException + && cause.getMessage() != null + && cause.getMessage().contains("An established connection was aborted by the software in your host machine"))) { // no more requests coming, so fast-forward the latch fastForward(); } else { diff --git a/muted-tests.yml b/muted-tests.yml index 341d127c7b64a..210215a131339 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,4 +1,6 @@ tests: +- class: "org.elasticsearch.xpack.transform.transforms.scheduling.MonotonicClockTests" + issue: "https://github.com/elastic/elasticsearch/issues/108529" # Examples: # # Mute a single test case in a YAML test suite: diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 81b3a086e9aca..99b2728ebfa3c 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -39,8 +39,10 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; /** * Create a simple "daemon controller", put it in the right place and check that it runs. @@ -64,18 +66,19 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static { // normally done by ESTestCase, but need here because spawner depends on logging LogConfigurator.loadLog4jPlugins(); + MockLogAppender.init(); } static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { final String expectedLogger; final String expectedMessage; - final CountDownLatch matchCalledLatch; - boolean saw; + final CountDownLatch matched; + volatile boolean saw; - ExpectedStreamMessage(String logger, String message, CountDownLatch matchCalledLatch) { + ExpectedStreamMessage(String logger, String message, CountDownLatch matched) { this.expectedLogger = logger; this.expectedMessage = message; - this.matchCalledLatch = matchCalledLatch; + this.matched = matched; } @Override @@ -84,8 +87,8 @@ public void match(LogEvent event) { && event.getLevel().equals(Level.WARN) && event.getMessage().getFormattedMessage().equals(expectedMessage)) { saw = true; + matched.countDown(); } - matchCalledLatch.countDown(); } @Override @@ -129,7 +132,7 @@ public void testNoControllerSpawn() throws IOException { try (Spawner spawner = new Spawner()) { spawner.spawnNativeControllers(environment); - assertThat(spawner.getProcesses(), hasSize(0)); + assertThat(spawner.getProcesses(), is(empty())); } } @@ -203,16 +206,16 @@ private void assertControllerSpawns(final Function pluginsDir String stdoutLoggerName = "test_plugin-controller-stdout"; String stderrLoggerName = "test_plugin-controller-stderr"; - MockLogAppender appender = new MockLogAppender(); Loggers.setLevel(LogManager.getLogger(stdoutLoggerName), Level.TRACE); Loggers.setLevel(LogManager.getLogger(stderrLoggerName), Level.TRACE); CountDownLatch messagesLoggedLatch = new CountDownLatch(2); - if (expectSpawn) { - appender.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); - 
appender.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); - } - try (var ignore = appender.capturing(stdoutLoggerName, stderrLoggerName)) { + try (var appender = MockLogAppender.capture(stdoutLoggerName, stderrLoggerName)) { + if (expectSpawn) { + appender.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); + appender.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); + } + Spawner spawner = new Spawner(); spawner.spawnNativeControllers(environment); @@ -228,7 +231,7 @@ private void assertControllerSpawns(final Function pluginsDir // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling assertTrue(process.waitFor(1, TimeUnit.SECONDS)); } else { - assertThat(processes, hasSize(0)); + assertThat(processes, is(empty())); } appender.assertAllExpectationsMatched(); } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index b1240747b1a67..81ac8ab1200f6 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -1219,6 +1219,7 @@ public void test500Readiness() throws Exception { builder().envVar("readiness.port", "9399").envVar("xpack.security.enabled", "false").envVar("discovery.type", "single-node") ); waitForElasticsearch(installation); + dumpDebug(); assertTrue(readinessProbe(9399)); } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java index ecc043906bd1a..787069eb2605c 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java @@ -264,7 +264,7 @@ public static Shell.Result startElasticsearchWithTty( Locale.ROOT, """ expect - <() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "false") + .feature(FeatureFlag.TIME_SERIES_MODE) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + protected AbstractRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 0487b282179a9..73abb634dfd76 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; -public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { +public class ClusterFeatureMigrationIT extends AbstractRollingUpgradeTestCase { @Before public void checkMigrationVersion() { diff --git 
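Both logging test changes above move to the same new idiom: rather than constructing a MockLogAppender and separately keeping the Releasable returned by `appender.capturing(...)`, tests obtain the appender from the static factory `MockLogAppender.capture(...)` and close the appender itself. The resulting try-with-resources shape, as used repeatedly in this diff (SomeTarget is a placeholder for the class whose logger is captured):

    try (var appender = MockLogAppender.capture(SomeTarget.class)) {
        appender.addExpectation(new MockLogAppender.SeenEventExpectation(
            "expected warning", SomeTarget.class.getName(), Level.WARN, "some message*"));
        // ... exercise the code under test that should emit the log line ...
        appender.assertAllExpectationsMatched();
    }

ClusterRerouteIT later in the diff applies the same pattern with both SeenEventExpectation and UnseenEventExpectation.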
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 73d91ac41fcb7..c7f99b3525f74 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -public class DesiredNodesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { private final int desiredNodesVersion; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 757f793ac4c46..488cd966ed65e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class DownsampleIT extends ParameterizedRollingUpgradeTestCase { +public class DownsampleIT extends AbstractRollingUpgradeTestCase { private static final String FIXED_INTERVAL = "1h"; private String index; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index 4fe45c05b157b..fc77eef0ae8bb 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FeatureUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class FeatureUpgradeIT extends AbstractRollingUpgradeTestCase { public FeatureUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index 860cd2c0e8617..306447d8cc2cd 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -40,7 +40,7 @@ * the co-ordinating node if older nodes were included in the system */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103473") -public class FieldCapsIT extends ParameterizedRollingUpgradeTestCase { +public class FieldCapsIT extends AbstractRollingUpgradeTestCase { public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 0f210ee4b2450..6647cb413c9f5 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -20,7 +20,7 @@ import static org.hamcrest.CoreMatchers.equalTo; -public class HealthNodeUpgradeIT extends 
ParameterizedRollingUpgradeTestCase { +public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java index 874fac615b9b1..1477e2b63cf03 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java @@ -26,7 +26,7 @@ import java.util.Locale; import java.util.Map; -public class IgnoredMetaFieldRollingUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class IgnoredMetaFieldRollingUpgradeIT extends AbstractRollingUpgradeTestCase { private static final String TERMS_AGG_QUERY = Strings.format(""" { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 82485130f05ce..157e2293b69ae 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -51,7 +51,7 @@ * xpack rolling restart tests. We should work on a way to remove this * duplication but for now we have no real way to share code. */ -public class IndexingIT extends ParameterizedRollingUpgradeTestCase { +public class IndexingIT extends AbstractRollingUpgradeTestCase { public IndexingIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java new file mode 100644 index 0000000000000..2acaf33c2130c --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.UpdateForV9; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; + +@UpdateForV9 +public class NodesCapabilitiesUpgradeIT extends AbstractRollingUpgradeTestCase { + + private static Boolean upgradingBeforeCapabilities; + + public NodesCapabilitiesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Before + public void checkBeforeHasNoCapabilities() throws IOException { + if (upgradingBeforeCapabilities == null) { + // try to do a _capabilities query on a node before we upgrade + try { + clusterHasCapability("GET", "_capabilities", List.of(), List.of()); + upgradingBeforeCapabilities = false; + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == 400) { + upgradingBeforeCapabilities = true; + } else { + throw e; + } + } + } + + assumeTrue("Only valid when upgrading from versions without capabilities API", upgradingBeforeCapabilities); + } + + public void testCapabilitiesReturnsFalsePartiallyUpgraded() throws IOException { + if (isMixedCluster()) { + // capabilities checks should either fail (if talking to an old node), + // or return false as not all nodes have the API (if talking to a new node) + try { + assertThat( + "Upgraded node should report no capabilities supported", + clusterHasCapability("GET", "_capabilities", List.of(), List.of()), + isPresentWith(false) + ); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() != 400) { + // throw explicitly to capture exception too + throw new AssertionError("Old node should not have the capabilities API", e); + } + } + } + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 63ed54d05adf2..d5f645c387d61 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -14,74 +14,45 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.TestFeatureService; import org.junit.AfterClass; import org.junit.Before; -import org.junit.ClassRule; -import org.junit.rules.RuleChain; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.TestRule; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.notNullValue; public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase { + protected static final int NODE_NUM = 3; private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); - - private static final TemporaryFolder repoDirectory = new TemporaryFolder(); - - private static final int NODE_NUM = 3; - - private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) - .nodes(NODE_NUM) - .setting("path.repo", new Supplier<>() { - @Override - @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") - public String get() { - return repoDirectory.getRoot().getPath(); - } - }) - .setting("xpack.security.enabled", "false") - .feature(FeatureFlag.TIME_SERIES_MODE) - .build(); - - @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); - - @ParametersFactory(shuffle = false) - public static Iterable parameters() { - return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); - } - private static final Set upgradedNodes = new HashSet<>(); private static TestFeatureService oldClusterTestFeatureService = null; private static boolean upgradeFailed = false; private static IndexVersion oldIndexVersion; - private final int requestedUpgradedNodes; protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { this.requestedUpgradedNodes = upgradedNodes; } + @ParametersFactory(shuffle = false) + public static Iterable parameters() { + return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); + } + + protected abstract ElasticsearchCluster getUpgradeCluster(); + @Before public void extractOldClusterFeatures() { if (isOldCluster() && oldClusterTestFeatureService == null) { @@ -135,7 +106,7 @@ public void upgradeNode() throws Exception { if (upgradedNodes.add(n)) { try { logger.info("Upgrading node {} to version {}", n, Version.CURRENT); - cluster.upgradeNodeToVersion(n, Version.CURRENT); + getUpgradeCluster().upgradeNodeToVersion(n, Version.CURRENT); } catch (Exception e) { upgradeFailed = true; throw e; @@ -199,7 +170,7 @@ protected static boolean isUpgradedCluster() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + return getUpgradeCluster().getHttpAddresses(); } @Override diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index ef80643c82c0d..593630546845d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; -public class SnapshotBasedRecoveryIT extends ParameterizedRollingUpgradeTestCase { +public class SnapshotBasedRecoveryIT extends AbstractRollingUpgradeTestCase { public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java index fbd6ee8aa3759..a2e3b03c9036f 100644 
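The refactoring above moves cluster ownership out of ParameterizedRollingUpgradeTestCase: the base class keeps the upgrade orchestration and the parameters factory but reaches the cluster only through the new abstract `getUpgradeCluster()` hook, while AbstractRollingUpgradeTestCase supplies the standard three-node cluster that the concrete ITs in this diff now extend. A test needing a differently provisioned cluster can extend the parameterized base directly, roughly like this (hypothetical subclass; the builder calls are illustrative):

    public class CustomClusterUpgradeIT extends ParameterizedRollingUpgradeTestCase {
        // NODE_NUM is now protected in the base class; getOldClusterTestVersion() as before.
        private static final ElasticsearchCluster cluster = ElasticsearchCluster.local()
            .version(getOldClusterTestVersion())
            .nodes(NODE_NUM)
            .build();

        public CustomClusterUpgradeIT(@Name("upgradedNodes") int upgradedNodes) {
            super(upgradedNodes);
        }

        @Override
        protected ElasticsearchCluster getUpgradeCluster() {
            return cluster;
        }
    }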
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SystemIndicesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class SystemIndicesUpgradeIT extends AbstractRollingUpgradeTestCase { public SystemIndicesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 3ce0fc79087c2..2889885f83984 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TsdbIT extends ParameterizedRollingUpgradeTestCase { +public class TsdbIT extends AbstractRollingUpgradeTestCase { public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 3af344051030b..8dc3b43abf3e1 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -24,7 +24,7 @@ import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.is; -public class UpgradeWithOldIndexSettingsIT extends ParameterizedRollingUpgradeTestCase { +public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index e78e0978b1d80..21dbad9487d4e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; -public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { +public class VectorSearchIT extends AbstractRollingUpgradeTestCase { public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java index dade5b53addae..6379a8875dfb4 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java @@ -22,7 +22,7 @@ * Basic tests for simple xpack functionality that are only run if the * cluster is on the default distribution. 
*/ -public class XPackIT extends ParameterizedRollingUpgradeTestCase { +public class XPackIT extends AbstractRollingUpgradeTestCase { public XPackIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java index eaf439f264ad5..d04c8802635d3 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java @@ -72,7 +72,7 @@ public void testDanglingIndicesCanBeListed() throws Exception { internalCluster().startNodes(3, buildSettings(0)); final DanglingIndexDetails danglingIndexDetails = createDanglingIndices(INDEX_NAME); - final String stoppedNodeId = mapNodeNameToId(danglingIndexDetails.stoppedNodeName); + final String stoppedNodeId = getNodeId(danglingIndexDetails.stoppedNodeName); final RestClient restClient = getRestClient(); @@ -163,7 +163,12 @@ public void testDanglingIndicesCanBeDeleted() throws Exception { // tombstone has been pushed out of the graveyard. createIndex("additional"); deleteIndex("additional"); - assertThat(listDanglingIndexIds(), is(empty())); + // reading dangling index metadata happens without holding all shard locks + // (as we do not know the index name from the index directory structure). + // As a result the index directory could be updated or deleted in the meantime by any concurrent operation + // and result in a node request failure that is propagated to the API call. + // Since the dangling index API is best-effort, we expect such failures to be retried at the client level. + assertBusy(() -> assertThat(listDanglingIndexIds(), is(empty()))); } private List listDanglingIndexIds() throws IOException { @@ -171,15 +176,14 @@ private List listDanglingIndexIds() throws IOException { assertOK(response); final XContentTestUtils.JsonMapView mapView = createJsonMapView(response.getEntity().getContent()); + logger.warn("dangling API response: {}", mapView); assertThat(mapView.get("_nodes.total"), equalTo(3)); assertThat(mapView.get("_nodes.successful"), equalTo(3)); assertThat(mapView.get("_nodes.failed"), equalTo(0)); List indices = mapView.get("dangling_indices"); - List danglingIndexIds = new ArrayList<>(); - for (int i = 0; i < indices.size(); i++) { danglingIndexIds.add(mapView.get("dangling_indices." + i + ".index_uuid")); } @@ -187,23 +191,6 @@ private List listDanglingIndexIds() throws IOException { return danglingIndexIds; } - /** - * Given a node name, finds the corresponding node ID. - */ - private String mapNodeNameToId(String nodeName) throws IOException { - final Response catResponse = getRestClient().performRequest(new Request("GET", "/_cat/nodes?full_id&h=id,name")); - assertOK(catResponse); - for (String nodeLine : Streams.readAllLines(catResponse.getEntity().getContent())) { - String[] elements = nodeLine.split(" "); - if (elements[1].equals(nodeName)) { - return elements[0]; - } - } - throw new AssertionError("Failed to map node name [" + nodeName + "] to node ID"); - } - /** * Helper that creates one or more indices, and importantly, * checks that they are green before proceeding. 
This is important diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json new file mode 100644 index 0000000000000..28c341d9983cc --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json @@ -0,0 +1,47 @@ +{ + "capabilities": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/capabilities.html", + "description": "Checks if the specified combination of method, API, parameters, and arbitrary capabilities are supported" + }, + "stability": "experimental", + "visibility": "private", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_capabilities", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "method": { + "type": "enum", + "description": "REST method to check", + "options": [ + "GET", "HEAD", "POST", "PUT", "DELETE" + ], + "default": "GET" + }, + "path": { + "type": "string", + "description": "API path to check" + }, + "parameters": { + "type": "string", + "description": "Comma-separated list of API parameters to check" + }, + "capabilities": { + "type": "string", + "description": "Comma-separated list of arbitrary API capabilities to check" + } + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml new file mode 100644 index 0000000000000..715e696bd1032 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -0,0 +1,28 @@ +--- +"Capabilities API": + + - requires: + capabilities: + - method: GET + path: /_capabilities + parameters: [method, path, parameters, capabilities] + capabilities: [] + reason: "capabilities api requires itself to be supported" + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: method,path,parameters,capabilities + error_trace: false + + - match: { supported: true } + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: unknown + error_trace: false + + - match: { supported: false } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml index ae3eadded108b..86f02641d86f1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml @@ -78,24 +78,6 @@ setup: - match: {test_index2.settings.index.refresh_interval: 10s} - is_false: foo.settings.index.refresh_interval ---- -"put settings in list of indices": - - skip: - awaits_fix: list of indices not implemented yet - - do: - indices.put_settings: - index: test_index1, test_index2 - body: - refresh_interval: 10s - - - do: - indices.get_settings: {} - - - match: {test_index1.settings.index.refresh_interval: 10s} - - match: {test_index2.settings.index.refresh_interval: 10s} - - is_false: foo.settings.index.refresh_interval - - --- "put settings in blank index": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 3ae8f8b09aa4a..ca1d22e4a1ce7 100644 --- 
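The capabilities spec and YAML test above introduce a private, experimental `_capabilities` endpoint: it asks every node whether a given combination of method, path, query parameters, and named capabilities is supported, and the answer is only definitive when all nodes respond. From the transport side the check looks like the snippet below, mirroring SimpleNodesCapabilitiesIT later in this diff (note that `isSupported()` now returns an Optional, empty when not every node could be consulted):

    NodesCapabilitiesResponse response = clusterAdmin()
        .nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path"))
        .actionGet();
    // isPresentWith(true): all nodes answered and all of them support the combination
    assertThat(response.isSupported(), isPresentWith(true));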
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -14,12 +14,26 @@ setup: "postings": "type": "text" "index_options": "offsets" + "nested": + "type": "nested" + "properties": + "text": + "type": "text" + "vectors": + "type": "dense_vector" + "dims": 2 + "index": true + "similarity": "l2_norm" + - do: index: index: test id: "1" body: "text" : "The quick brown fox is brown." + "nested": + "text": "The quick brown fox is brown." + "vectors": [1, 2] - do: indices.refresh: {} @@ -43,6 +57,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -58,6 +73,7 @@ teardown: "query" : { "combined_fields" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -72,11 +88,13 @@ teardown: search: body: { "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } } } + "highlight": { "type": "unified", "fields": { "*": { } } } + } - - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } - do: indices.put_settings: @@ -90,6 +108,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "type": "phrase", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -100,43 +119,69 @@ teardown: reason: 'kNN was not correctly skipped until 8.12' - do: - indices.create: - index: test-highlighting-knn - body: - mappings: - "properties": - "vectors": - "type": "dense_vector" - "dims": 2 - "index": true - "similarity": "l2_norm" - "text": - "type": "text" - "fields": - "fvh": - "type": "text" - "term_vector": "with_positions_offsets" - "postings": - "type": "text" - "index_options": "offsets" - - do: - index: - index: test-highlighting-knn - id: "1" - body: - "text" : "The quick brown fox is brown." 
- "vectors": [1, 2] + search: + index: test + body: { + "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, + "highlight": { "type": "unified", "fields": { "text*": { } } }, + "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } + +--- +"Test nested queries automatically disable weighted mode": + - requires: + cluster_features: "gte_v8.15.0" + reason: 'nested was not correctly skipped until 8.15' + - do: - indices.refresh: {} + search: + index: test + body: { + "query": { + "nested": { + "path": "nested", + "query": { + "multi_match": { + "query": "quick brown fox", + "type": "phrase", + "fields": [ "nested.text" ] + } + } + } + }, + "highlight": { "type": "unified", "fields": { "*": { } } } + } + + - length: { hits.hits.0.highlight: 1 } + - match: { hits.hits.0.highlight.nested\.text.0: "The quick brown fox is brown." } - do: search: - index: test-highlighting-knn + index: test body: { - "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } }, - "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + "query": { + "bool": { + "must_not": { + "nested": { + "path": "nested", + "query": { + "multi_match": { "query": "quick red fox", "type": "phrase", "fields": [ "nested.text" ] } + } + } + }, + "should": { + "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } + } + } + }, + "highlight": { "type": "unified", "fields": { "text*": { } } } + } + - length: { hits.hits.0.highlight: 3 } - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml deleted file mode 100644 index 81be6f82d8a14..0000000000000 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ /dev/null @@ -1,30 +0,0 @@ ---- -"Metadata Fields": - - - skip: - awaits_fix: "Update doesn't return metadata fields, waiting for #3259" - - - do: - indices.create: - index: test_1 - - - do: - update: - index: test_1 - id: "1" - parent: 5 - fields: [ _routing ] - body: - doc: { foo: baz } - upsert: { foo: bar } - - - match: { get._routing: "5" } - - - do: - get: - index: test_1 - id: "1" - parent: 5 - stored_fields: [ _routing ] - - diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 3b9d3e133b63a..fcccc0051f0cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -387,17 +387,16 @@ public void testMessageLogging() { ) .get(); - MockLogAppender dryRunMockLog = new MockLogAppender(); - dryRunMockLog.addExpectation( - new MockLogAppender.UnseenEventExpectation( - "no completed message logged on dry run", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" - ) - ); + try (var dryRunMockLog = MockLogAppender.capture(TransportClusterRerouteAction.class)) { + dryRunMockLog.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "no completed message logged on dry run", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + ) + ); - try (var ignored = dryRunMockLog.capturing(TransportClusterRerouteAction.class)) { AllocationCommand dryRunAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true); ClusterRerouteResponse dryRunResponse = clusterAdmin().prepareReroute() .setExplain(randomBoolean()) @@ -412,24 +411,23 @@ public void testMessageLogging() { dryRunMockLog.assertAllExpectationsMatched(); } - MockLogAppender allocateMockLog = new MockLogAppender(); - allocateMockLog.addExpectation( - new MockLogAppender.SeenEventExpectation( - "message for first allocate empty primary", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" + nodeName1 + "*" - ) - ); - allocateMockLog.addExpectation( - new MockLogAppender.UnseenEventExpectation( - "no message for second allocate empty primary", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" + nodeName2 + "*" - ) - ); - try (var ignored = allocateMockLog.capturing(TransportClusterRerouteAction.class)) { + try (var allocateMockLog = MockLogAppender.capture(TransportClusterRerouteAction.class)) { + allocateMockLog.addExpectation( + new MockLogAppender.SeenEventExpectation( + "message for first allocate empty primary", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + nodeName1 + "*" + ) + ); + allocateMockLog.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "no message for second allocate empty primary", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + nodeName2 + "*" + ) + ); AllocationCommand 
yesDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true); AllocationCommand noDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand("noexist", 1, nodeName2, true); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java index c47ada432f4b1..0b9ca9d9f9586 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java @@ -175,6 +175,7 @@ public void testMustAcceptDataLossToImportDanglingIndex() throws Exception { * other will be considered dangling, and can therefore be listed and * deleted through the API */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108288") public void testDanglingIndexCanBeDeleted() throws Exception { final Settings settings = buildSettings(1, true); internalCluster().startNodes(3, settings); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java index 7e4ae040caeca..9b60044c94f70 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java @@ -15,8 +15,8 @@ import java.io.IOException; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class SimpleNodesCapabilitiesIT extends ESIntegTestCase { @@ -31,25 +31,25 @@ public void testNodesCapabilities() throws IOException { NodesCapabilitiesResponse response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(true)); + assertThat(response.isSupported(), isPresentWith(true)); // check we support some parameters of the capabilities API response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(true)); + assertThat(response.isSupported(), isPresentWith(true)); // check we don't support some other parameters of the capabilities API response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "invalid")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(false)); + assertThat(response.isSupported(), isPresentWith(false)); // check we don't support a random invalid api // TODO this is not working yet - see https://github.com/elastic/elasticsearch/issues/107425 /*response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_invalid")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(false));*/ + assertThat(response.isSupported(), isPresentWith(false));*/ } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index 58dcfdaec5147..38bc372868df0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -243,7 +243,7 @@ public void testReservedStatePersistsOnRestart() throws Exception { FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); - assertTrue(masterFileSettingsService.watching()); + assertBusy(() -> assertTrue(masterFileSettingsService.watching())); logger.info("--> write some settings"); writeJSONFile(masterNode, testJSON); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java new file mode 100644 index 0000000000000..a12a26d69c5ff --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; + +public class CollapseSearchResultsIT extends ESIntegTestCase { + + public void testCollapse() { + final String indexName = "test_collapse"; + createIndex(indexName); + final String collapseField = "collapse_field"; + assertAcked(indicesAdmin().preparePutMapping(indexName).setSource(collapseField, "type=keyword")); + index(indexName, "id_1", Map.of(collapseField, "value1")); + index(indexName, "id_2", Map.of(collapseField, "value2")); + refresh(indexName); + assertNoFailuresAndResponse( + prepareSearch(indexName).setQuery(new MatchAllQueryBuilder()) + .setCollapse(new CollapseBuilder(collapseField).setInnerHits(new InnerHitBuilder("ih").setSize(2))), + searchResponse -> { + assertEquals(collapseField, searchResponse.getHits().getCollapseField()); + assertEquals(Set.of(new BytesRef("value1"), new BytesRef("value2")), Set.of(searchResponse.getHits().getCollapseValues())); + } + ); + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index c9a6cfaf754c6..71402d3e9c1d8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -22,6 +22,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -83,9 +84,11 @@ public void setupSuiteScopeCluster() throws Exception { } indexRandom(true, builders); ensureSearchable(); + // Force merge to ensure segment consistency as any segment merging can change which particular documents + // are sampled + assertNoFailures(indicesAdmin().prepareForceMerge("idx").setMaxNumSegments(1).get()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105839") public void testRandomSamplerConsistentSeed() { double[] sampleMonotonicValue = new double[1]; double[] sampleNumericValue = new double[1]; diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 31768ab85474d..2983a2d62de71 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1902,18 +1902,8 @@ private enum ElasticsearchExceptionHandle { 175, TransportVersions.V_8_12_0 ), - SEARCH_TIMEOUT_EXCEPTION( - SearchTimeoutException.class, - SearchTimeoutException::new, - 176, - TransportVersions.SEARCH_TIMEOUT_EXCEPTION_ADDED - ), - INGEST_GRAPH_STRUCTURE_EXCEPTION( - GraphStructureException.class, - GraphStructureException::new, - 177, - TransportVersions.INGEST_GRAPH_STRUCTURE_EXCEPTION - ), + SEARCH_TIMEOUT_EXCEPTION(SearchTimeoutException.class, SearchTimeoutException::new, 176, TransportVersions.V_8_13_0), + INGEST_GRAPH_STRUCTURE_EXCEPTION(GraphStructureException.class, GraphStructureException::new, 177, TransportVersions.V_8_13_0), FAILURE_INDEX_NOT_SUPPORTED_EXCEPTION( FailureIndexNotSupportedException.class, FailureIndexNotSupportedException::new, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1cc7e47cddda3..f1232d2442c8b 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -97,43 +97,9 @@ static TransportVersion def(int id) { public static final TransportVersion V_8_10_X = def(8_500_061); public static final TransportVersion V_8_11_X = def(8_512_00_1); public static final TransportVersion V_8_12_0 = def(8_560_00_0); - public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH = def(8_560_00_1); - public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); - public static final TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); - public static final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); - public static final TransportVersion ESQL_STATUS_INCLUDE_LUCENE_QUERIES = def(8_564_00_0); - public static final TransportVersion ESQL_CLUSTER_ALIAS = def(8_565_00_0); - public static final TransportVersion SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED = def(8_566_00_0); - public static final TransportVersion SMALLER_RELOAD_SECURE_SETTINGS_REQUEST = def(8_567_00_0); - public static final TransportVersion UPDATE_API_KEY_EXPIRATION_TIME_ADDED = def(8_568_00_0); - public static final TransportVersion LAZY_ROLLOVER_ADDED = def(8_569_00_0); - public static 
final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); - public static final TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); - public static final TransportVersion ESQL_ENRICH_POLICY_CCQ_MODE = def(8_573_00_0); - public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ = def(8_574_00_0); - public static final TransportVersion PEERFINDER_REPORTS_PEERS_MASTERS = def(8_575_00_0); - public static final TransportVersion ESQL_MULTI_CLUSTERS_ENRICH = def(8_576_00_0); - public static final TransportVersion NESTED_KNN_MORE_INNER_HITS = def(8_577_00_0); - public static final TransportVersion REQUIRE_DATA_STREAM_ADDED = def(8_578_00_0); - public static final TransportVersion ML_INFERENCE_COHERE_EMBEDDINGS_ADDED = def(8_579_00_0); - public static final TransportVersion DESIRED_NODE_VERSION_OPTIONAL_STRING = def(8_580_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED = def(8_581_00_0); - public static final TransportVersion ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE = def(8_582_00_0); - public static final TransportVersion KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM = def(8_583_00_0); - public static final TransportVersion TRANSFORM_GET_BASIC_STATS = def(8_584_00_0); - public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = def(8_585_00_0); - public static final TransportVersion SEARCH_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); - public static final TransportVersion ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED = def(8_587_00_0); - public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_REPOS = def(8_588_00_0); - public static final TransportVersion RESOLVE_CLUSTER_ENDPOINT_ADDED = def(8_589_00_0); - public static final TransportVersion FIELD_CAPS_FIELD_HAS_VALUE = def(8_590_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED = def(8_591_00_0); - public static final TransportVersion ML_DIMENSIONS_SET_BY_USER_ADDED = def(8_592_00_0); - public static final TransportVersion INDEX_REQUEST_NORMALIZED_BYTES_PARSED = def(8_593_00_0); - public static final TransportVersion INGEST_GRAPH_STRUCTURE_EXCEPTION = def(8_594_00_0); + public static final TransportVersion V_8_12_1 = def(8_560_00_1); public static final TransportVersion V_8_13_0 = def(8_595_00_0); - public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13 = def(8_595_00_1); + public static final TransportVersion V_8_13_4 = def(8_595_00_1); // 8.14.0+ public static final TransportVersion RANDOM_AGG_SHARD_SEED = def(8_596_00_0); public static final TransportVersion ESQL_TIMINGS = def(8_597_00_0); @@ -195,7 +161,10 @@ static TransportVersion def(int id) { public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); - + public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); + public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0); + public static final TransportVersion ML_RERANK_DOC_OPTIONAL = def(8_657_00_0); + public static final TransportVersion FAILURE_STORE_FIELD_PARITY = def(8_658_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index a2e04d0bf3d48..dc161766b7954 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -174,6 +174,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_2 = new Version(8_13_02_99); public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_13_4 = new Version(8_13_04_99); + public static final Version V_8_13_5 = new Version(8_13_05_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version V_8_15_0 = new Version(8_15_00_99); public static final Version CURRENT = V_8_15_0; diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index d07717857169b..21f3df2ab7175 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -31,17 +31,94 @@ import static org.elasticsearch.action.ActionListenerImplementations.safeOnFailure; /** - * A listener for action responses or failures. + *

+ * Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code
+ * which doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become
+ * available. They support several useful control flows:
+ * <ul>
+ * <li>They can be completed immediately on the calling thread.</li>
+ * <li>They can be completed concurrently on a different thread.</li>
+ * <li>They can be stored in a data structure and completed later on when the system reaches a particular state.</li>
+ * <li>Most commonly, they can be passed on to other methods that themselves require a callback.</li>
+ * <li>They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run
+ * before or after completion, before passing them on.</li>
+ * </ul>
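[Editorial aside, not part of this diff: the listener contract being documented here reduces to the two completion paths listed above. A minimal sketch against the real org.elasticsearch.action.ActionListener interface; the demo class and printed strings are invented for illustration.]

    import org.elasticsearch.action.ActionListener;

    class ListenerSketch {
        public static void main(String[] args) {
            ActionListener<String> listener = new ActionListener<>() {
                @Override
                public void onResponse(String response) {
                    System.out.println("completed: " + response); // success path
                }

                @Override
                public void onFailure(Exception e) {
                    System.err.println("failed: " + e);           // failure path
                }
            };
            listener.onResponse("done"); // completed immediately on the calling thread
        }
    }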

+ * <p>
+ * {@link ActionListener} is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. {@link
+ * ActionListener} is used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes
+ * it easier to compose parts of the system together without needing to build adapters to convert back and forth between different kinds of
+ * callback. It also makes it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely
+ * takes practice and is certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with {@link
+ * ActionListener} instances themselves, creating new instances out of existing ones and completing them in interesting ways. See for
+ * instance:
+ * <ul>
+ * <li>All the static methods on {@link ActionListener} itself.</li>
+ * <li>{@link org.elasticsearch.action.support.ThreadedActionListener} for forking work elsewhere.</li>
+ * <li>{@link org.elasticsearch.action.support.RefCountingListener} for running work in parallel.</li>
+ * <li>{@link org.elasticsearch.action.support.SubscribableListener} for constructing flexible workflows.</li>
+ * </ul>
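[Editorial aside, not part of this diff: two of the library utilities listed above in use. ActionListener.wrap and ActionListener#map are real methods from this codebase; the handlers and values are invented.]

    import org.elasticsearch.action.ActionListener;

    class ComposeSketch {
        public static void main(String[] args) {
            // Build a listener from a success handler and a failure handler.
            ActionListener<String> onString = ActionListener.wrap(
                s -> System.out.println("got: " + s),
                e -> System.err.println("failed: " + e)
            );

            // Derive a listener that accepts an upstream Integer, converts it, and
            // completes the original String listener with the converted value.
            ActionListener<Integer> onInt = onString.map(i -> Integer.toString(i));
            onInt.onResponse(42); // prints "got: 42"
        }
    }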

+ * <p>
+ * Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous
+ * code without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too
+ * expensive to waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means
+ * that most of our code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes
+ * a callback. The entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at {@link
+ * org.elasticsearch.rest.BaseRestHandler}{@code #prepareRequest} and transport APIs all start at {@link
+ * org.elasticsearch.action.support.TransportAction}{@code #doExecute}) and the whole system fundamentally works in terms of an event loop
+ * (an {@code io.netty.channel.EventLoop}) which processes network events via callbacks.
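[Editorial aside, not part of this diff: what the blocking bridge from synchronous to asynchronous code looks like. PlainActionFuture is a real utility that implements ActionListener; fetchValue is a hypothetical async method.]

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.support.PlainActionFuture;

    class BlockingBridgeSketch {
        // Hypothetical asynchronous method in the style described above.
        static void fetchValue(ActionListener<String> listener) {
            listener.onResponse("value"); // stands in for real async work
        }

        public static void main(String[] args) {
            // PlainActionFuture is both a listener and a future, so synchronous
            // code can consume an async result -- at the cost of parking a thread.
            PlainActionFuture<String> future = new PlainActionFuture<>();
            fetchValue(future);
            String value = future.actionGet(); // blocks until the listener completes
            System.out.println(value);
        }
    }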

+ * <p>
+ * {@link ActionListener} is not an ad-hoc invention. Formally speaking, it is our implementation of the general concept of a
+ * <i>continuation</i> in the sense of <a href="https://en.wikipedia.org/wiki/Continuation-passing_style">continuation-passing style</a>
+ * (CPS): an extra argument to a function which defines how to continue the computation when the result is available. This is in contrast to
+ * <i>direct style</i> which is the more usual style of calling methods that return values directly back to the caller so they can continue
+ * executing as normal. There are essentially two ways that computation can continue in Java (it can return a value or it can throw an
+ * exception) which is why {@link ActionListener} has both an {@link #onResponse} and an {@link #onFailure} method.

+ * <p>
+ * CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS
+ * also enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in
+ * parallel, perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be
+ * satisfied before proceeding (e.g. {@link org.elasticsearch.action.support.SubscribableListener} amongst many others). Some languages have
+ * first-class support for continuations (e.g. the {@code async} and {@code await} primitives in C#) allowing the programmer to write code
+ * in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all the callbacks
+ * ourselves.
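[Editorial aside, not part of this diff: a plain-Java sketch of one control structure named above, a list of continuations all waiting on the same condition. The holder class is invented for illustration.]

    import org.elasticsearch.action.ActionListener;

    import java.util.ArrayList;
    import java.util.List;

    // Parks listeners until some condition is reached, then completes them all.
    class PendingWaiters {
        private final List<ActionListener<Void>> waiters = new ArrayList<>();
        private boolean ready = false;

        synchronized void addWaiter(ActionListener<Void> listener) {
            if (ready) {
                listener.onResponse(null); // condition already holds: complete now
            } else {
                waiters.add(listener);     // stored, completed later by onReady()
            }
        }

        synchronized void onReady() {
            ready = true;
            waiters.forEach(l -> l.onResponse(null));
            waiters.clear();
        }
    }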

+ * <p>
+ * Strictly speaking, CPS requires that a computation only continues by calling the continuation. In Elasticsearch, this means that
+ * asynchronous methods must have {@code void} return type and may not throw any exceptions. This is mostly the case in our code as written
+ * today, and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In
+ * particular, it's not uncommon to permit some methods to throw an exception, using things like {@link ActionListener#run} (or an
+ * equivalent {@code try ... catch ...} block) further up the stack to handle it. Some methods also take (and may complete) an {@link
+ * ActionListener} parameter, but still return a value separately for other local synchronous work.
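[Editorial aside, not part of this diff: the ActionListener.run idiom mentioned above, sketched with hypothetical fetchValue/loadValue methods. run() catches whatever the body throws and fails the listener instead of letting the exception escape.]

    import org.elasticsearch.action.ActionListener;

    class RunSketch {
        // Hypothetical async method in the recommended shape: void return type,
        // no checked exceptions escaping to the caller.
        static void fetchValue(ActionListener<String> listener) {
            ActionListener.run(listener, l -> {
                // loadValue() may throw; run() routes the exception to l.onFailure()
                l.onResponse(loadValue());
            });
        }

        static String loadValue() throws Exception {
            return "value"; // invented stand-in for work that can fail
        }
    }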

+ * <p>
+ * This pattern is often used in the transport action layer with the use of the {@link
+ * org.elasticsearch.action.support.ChannelActionListener} class, which wraps a {@link org.elasticsearch.transport.TransportChannel}
+ * produced by the transport layer. {@link org.elasticsearch.transport.TransportChannel} implementations can hold a reference to a Netty
+ * channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels,
+ * so a call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright,
+ * barring caller timeouts.

+ * <p>
+ * Note that we explicitly avoid {@link java.util.concurrent.CompletableFuture} and other similar mechanisms as much as possible. They
+ * can achieve the same goals as {@link ActionListener}, but can also easily be misused in various ways that lead to severe bugs. In
+ * particular, futures support blocking while waiting for a result, but this is almost never appropriate in Elasticsearch's production code
+ * where threads are such a precious resource. Moreover if something throws an {@link Error} then the JVM should exit pretty much straight
+ * away, but {@link java.util.concurrent.CompletableFuture} can catch an {@link Error} which delays the JVM exit until its result is
+ * observed. This may be much later, or possibly even never. It's not possible to introduce such bugs when using {@link ActionListener}.
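[Editorial aside, not part of this diff: the Error-capturing hazard described above, reproduced with the plain JDK future API.]

    import java.util.concurrent.CompletableFuture;

    class FutureHazard {
        public static void main(String[] args) {
            CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
                throw new OutOfMemoryError("simulated"); // captured by the future, not fatal
            });
            // Unless future.join() or future.get() is eventually called, the Error
            // is never observed and the JVM carries on as if nothing happened.
        }
    }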

*/ public interface ActionListener<Response> { /** - * Handle action response. This response may constitute a failure or a - * success but it is up to the listener to make that decision. + * Complete this listener with a successful (or at least, non-exceptional) response. */ void onResponse(Response response); /** - * A failure caused by an exception at some phase of the task. + * Complete this listener with an exceptional response. */ void onFailure(Exception e); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java index e6de1faa1aff7..91561814fea1a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java @@ -49,6 +49,7 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest { - public DesiredBalanceRequest() {} + public DesiredBalanceRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public DesiredBalanceRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java index 75434ff554b9c..f26921fd47260 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java @@ -103,6 +103,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state) public static class Request extends MasterNodeReadRequest<Request> { public Request(TaskId parentTaskId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setParentTask(parentTaskId); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java index 75877cf0630f4..82e4e4123e4fe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java @@ -57,6 +57,7 @@ public AddVotingConfigExclusionsRequest(String... nodeNames) { * @param timeout How long to wait for the added exclusions to take effect and be removed from the voting configuration.
*/ public AddVotingConfigExclusionsRequest(String[] nodeIds, String[] nodeNames, TimeValue timeout) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (timeout.compareTo(TimeValue.ZERO) < 0) { throw new IllegalArgumentException("timeout [" + timeout + "] must be non-negative"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java index f8f64edad2974..2ddd27261db0f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java @@ -26,7 +26,9 @@ public class ClearVotingConfigExclusionsRequest extends MasterNodeRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java index e6e2616e67662..46e41d306cefe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java @@ -102,7 +102,9 @@ public ClusterState afterBatchExecution(ClusterState clusterState, boolean clust } public static class Request extends AcknowledgedRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java index c7c2b9a290a2e..3d8cdb4b405f8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java @@ -48,6 +48,7 @@ public class UpdateDesiredNodesRequest extends AcknowledgedRequest nodes, boolean dryRun) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); assert historyID != null; assert nodes != null; this.historyID = historyID; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java index a94555f1dfd1c..2b60e2d4a5ffa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java @@ -37,9 +37,12 @@ public class ClusterHealthRequest extends MasterNodeReadRequest { public GetFeatureUpgradeStatusRequest() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); } public GetFeatureUpgradeStatusRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java index ccc4a62a1138f..36a90ae9afe33 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java @@ -20,7 +20,7 @@ public class PostFeatureUpgradeRequest extends MasterNodeRequest<PostFeatureUpgradeRequest> { public PostFeatureUpgradeRequest() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); } public PostFeatureUpgradeRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java index 63fdb9f7da08a..3527b8cc46840 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; public class NodesCapabilitiesResponse extends BaseNodesResponse<NodeCapability> implements ToXContentFragment { protected NodesCapabilitiesResponse(ClusterName clusterName, List<NodeCapability> nodes, List<FailedNodeException> failures) { @@ -35,12 +36,20 @@ protected void writeNodesTo(StreamOutput out, List<NodeCapability> nodes) throws TransportAction.localOnly(); } - public boolean isSupported() { - return getNodes().isEmpty() == false && getNodes().stream().allMatch(NodeCapability::isSupported); + public Optional<Boolean> isSupported() { + if (hasFailures() || getNodes().isEmpty()) { + // there are no nodes in the response (uh? what about ourselves?) + // or there's a problem (hopefully transient) talking to one or more nodes.
+ // We don't have enough information to decide if it's supported or not, so return unknown + return Optional.empty(); + } + + return Optional.of(getNodes().stream().allMatch(NodeCapability::isSupported)); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field("supported", isSupported()); + Optional<Boolean> supported = isSupported(); + return builder.field("supported", supported.orElse(null)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java index 7e392775bf42e..71aa95908d3b7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.node.capabilities; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -18,8 +19,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; @@ -27,6 +30,7 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; public class TransportNodesCapabilitiesAction extends TransportNodesAction< @@ -38,6 +42,7 @@ public class TransportNodesCapabilitiesAction extends TransportNodesAction< public static final ActionType<NodesCapabilitiesResponse> TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); private final RestController restController; + private final FeatureService featureService; @Inject public TransportNodesCapabilitiesAction( @@ -45,7 +50,8 @@ public TransportNodesCapabilitiesAction( ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - RestController restController + RestController restController, + FeatureService featureService ) { super( TYPE.name(), @@ -56,6 +62,23 @@ public TransportNodesCapabilitiesAction( threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.restController = restController; + this.featureService = featureService; + } + + @Override + protected void doExecute(Task task, NodesCapabilitiesRequest request, ActionListener<NodesCapabilitiesResponse> listener) { + if (featureService.clusterHasFeature(clusterService.state(), RestNodesCapabilitiesAction.CAPABILITIES_ACTION) == false) { + // not everything in the cluster supports capabilities.
+ // Therefore we don't support whatever it is we're being asked for + listener.onResponse(new NodesCapabilitiesResponse(clusterService.getClusterName(), List.of(), List.of()) { + @Override + public Optional isSupported() { + return Optional.of(false); + } + }); + } else { + super.doExecute(task, request, listener); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java index 1118a6318ddf7..ef5d7f5e74ef8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java @@ -27,7 +27,7 @@ public class NodeHotThreads extends BaseNodeResponse { NodeHotThreads(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { bytes = in.readReleasableBytesReference(); } else { bytes = ReleasableBytesReference.wrap(new BytesArray(in.readString().getBytes(StandardCharsets.UTF_8))); @@ -56,7 +56,7 @@ public java.io.Reader getHotThreadsReader() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBytesReference(bytes); } else { out.writeString(bytes.utf8ToString()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java index c24833dca49ee..a83a09af642fa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java @@ -97,7 +97,7 @@ public static class NodeRequest extends TransportRequest { NodeRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.SMALLER_RELOAD_SECURE_SETTINGS_REQUEST)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { TaskId.readFromStream(in); in.readStringArray(); in.readOptionalArray(DiscoveryNode::new, DiscoveryNode[]::new); @@ -131,7 +131,7 @@ public void writeTo(StreamOutput out) throws IOException { assert hasReferences(); super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.SMALLER_RELOAD_SECURE_SETTINGS_REQUEST)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { TaskId.EMPTY_TASK_ID.writeTo(out); out.writeStringArray(Strings.EMPTY_ARRAY); out.writeOptionalArray(StreamOutput::writeWriteable, null); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java index a88fb83b2300d..5bde01195e35c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java @@ -34,6 +34,7 @@ public class PrevalidateNodeRemovalRequest 
extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index 5b49a41ed9476..c4e40f1b208b4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -55,7 +55,9 @@ public ClusterUpdateSettingsRequest(StreamInput in) throws IOException { persistentSettings = readSettingsFromStream(in); } - public ClusterUpdateSettingsRequest() {} + public ClusterUpdateSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 6f6253491c580..91c302c8aa7be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -31,9 +31,12 @@ public final class ClusterSearchShardsRequest extends MasterNodeReadRequest userMetadata; - public CreateSnapshotRequest() {} + public CreateSnapshotRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new put repository request with the provided snapshot and repository names @@ -87,6 +89,7 @@ public CreateSnapshotRequest() {} * @param snapshot snapshot name */ public CreateSnapshotRequest(String repository, String snapshot) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.snapshot = snapshot; this.repository = repository; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java index b16041da66bf7..67389ea3116d8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java @@ -38,6 +38,7 @@ public class DeleteSnapshotRequest extends MasterNodeRequest private boolean includeIndexNames = true; - public GetSnapshotsRequest() {} + public GetSnapshotsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new get snapshots request with given repository names and list of snapshots @@ -85,6 +87,7 @@ public GetSnapshotsRequest() {} * @param snapshots list of snapshots */ public GetSnapshotsRequest(String[] repositories, String[] snapshots) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.repositories = repositories; this.snapshots = snapshots; } @@ -95,6 +98,7 @@ public GetSnapshotsRequest(String[] repositories, String[] snapshots) { * @param repositories repository names */ public GetSnapshotsRequest(String... 
repositories) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.repositories = repositories; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java index d8fd55451cc63..7a7cc0c304556 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java @@ -29,6 +29,7 @@ public class GetShardSnapshotRequest extends MasterNodeRequest repositories, ShardId shardId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); assert repositories.isEmpty() == false; assert repositories.stream().noneMatch(Objects::isNull); assert repositories.size() == 1 || repositories.stream().noneMatch(repo -> repo.equals(ALL_REPOSITORIES)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 8d025653d47fe..73339cedb96e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -60,7 +60,9 @@ public class RestoreSnapshotRequest extends MasterNodeRequest { - public PendingClusterTasksRequest() {} + public PendingClusterTasksRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public PendingClusterTasksRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index fac2006b68814..f223d7fb2762f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -65,7 +65,9 @@ public IndicesAliasesRequest(StreamInput in) throws IOException { origin = in.readOptionalString(); } - public IndicesAliasesRequest() {} + public IndicesAliasesRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Request to take one or more actions on one or more indexes and alias combinations. diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java index 9d10065c9c3e9..09071f2e6ea3a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java @@ -34,11 +34,14 @@ public class GetAliasesRequest extends MasterNodeReadRequest private String[] originalAliases = Strings.EMPTY_ARRAY; public GetAliasesRequest(String... 
aliases) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.aliases = aliases; this.originalAliases = aliases; } - public GetAliasesRequest() {} + public GetAliasesRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * NB prior to 8.12 get-aliases was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until we no diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java index 9427a5fa363ba..9a722f1bce2a9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java @@ -37,12 +37,15 @@ public CloseIndexRequest(StreamInput in) throws IOException { waitForActiveShards = ActiveShardCount.readFrom(in); } - public CloseIndexRequest() {} + public CloseIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new close index request for the specified index. */ public CloseIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index 9cd7d713a3a4c..094fccbc35182 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -266,9 +266,9 @@ ClusterState execute( final var dataStream = clusterState.metadata().dataStreams().get(request.index()); final var backingIndexName = dataStream.getIndices().get(0).getName(); - final var indexNames = dataStream.getFailureIndices().isEmpty() + final var indexNames = dataStream.getFailureIndices().getIndices().isEmpty() ? List.of(backingIndexName) - : List.of(backingIndexName, dataStream.getFailureIndices().get(0).getName()); + : List.of(backingIndexName, dataStream.getFailureIndices().getIndices().get(0).getName()); taskContext.success(getAckListener(indexNames, allocationActionMultiListener)); successfulRequests.put(request, indexNames); return clusterState; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 2ec6db339b6ef..3a78738ae986a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -104,14 +104,16 @@ public CreateIndexRequest(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { origin = in.readString(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); } else { requireDataStream = false; } } - public CreateIndexRequest() {} + public CreateIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a request to create an index. 
@@ -129,6 +131,7 @@ public CreateIndexRequest(String index) { * @param settings the settings to apply to the index */ public CreateIndexRequest(String index, Settings settings) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.index = index; this.settings = settings; } @@ -487,7 +490,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { out.writeString(origin); } - if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalBoolean(this.requireDataStream); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java index b8206cba8de2a..daceeece4f97b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java @@ -30,7 +30,7 @@ public DeleteDanglingIndexRequest(StreamInput in) throws IOException { } public DeleteDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); this.acceptDataLoss = acceptDataLoss; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java index 66378ab9907d8..be2fb10821662 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java @@ -32,7 +32,7 @@ public ImportDanglingIndexRequest(StreamInput in) throws IOException { } public ImportDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); this.acceptDataLoss = acceptDataLoss; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java index 87cfc303a289a..2cb431577242d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java @@ -48,7 +48,9 @@ public DeleteIndexRequest(StreamInput in) throws IOException { indicesOptions = IndicesOptions.readIndicesOptions(in); } - public DeleteIndexRequest() {} + public DeleteIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new delete index request for the specified index. @@ -56,6 +58,7 @@ public DeleteIndexRequest() {} * @param index The index to delete. Use "_all" to delete all indices. 
*/ public DeleteIndexRequest(String index) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = new String[] { index }; } @@ -65,6 +68,7 @@ public DeleteIndexRequest(String index) { * @param indices The indices to delete. Use "_all" to delete all indices. */ public DeleteIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index edc6381438635..707286801cf66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -108,13 +108,16 @@ public PutMappingRequest(StreamInput in) throws IOException { writeIndexOnly = in.readBoolean(); } - public PutMappingRequest() {} + public PutMappingRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new put mapping request against one or more indices. If nothing is set then * it will be executed against all indices. */ public PutMappingRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java index fb0745eb72d1f..4bb4578f24459 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java @@ -39,12 +39,15 @@ public OpenIndexRequest(StreamInput in) throws IOException { waitForActiveShards = ActiveShardCount.readFrom(in); } - public OpenIndexRequest() {} + public OpenIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new open index request for the specified index. */ public OpenIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java index caf33a541e92a..9331d7010a6e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java @@ -43,6 +43,7 @@ public AddIndexBlockRequest(StreamInput in) throws IOException { * Constructs a new request for the specified block and indices */ public AddIndexBlockRequest(APIBlock block, String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.block = Objects.requireNonNull(block); this.indices = Objects.requireNonNull(indices); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java index 1649e4587d63c..118f139045971 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java @@ -62,10 +62,10 @@ public ResolveClusterActionRequest(String[] names, IndicesOptions indicesOptions public ResolveClusterActionRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + in.getTransportVersion().toReleaseVersion() ); @@ -78,10 +78,10 @@ public ResolveClusterActionRequest(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java index ee2e3d60dc56e..892b34df2b863 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java @@ -44,10 +44,10 @@ public ResolveClusterActionResponse(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java index 578b4ae547a06..dc2416a1b1baa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java @@ -65,10 +65,10 @@ public ResolveClusterInfo(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index 4284d860d85c0..ed3721b35f3b4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -196,12 +196,11 @@ public static NameResolution resolveRolloverNames( final IndexAbstraction indexAbstraction = currentState.metadata().getIndicesLookup().get(rolloverTarget); return switch (indexAbstraction.getType()) { case ALIAS -> resolveAliasRolloverNames(currentState.metadata(), indexAbstraction, newIndexName); - case DATA_STREAM -> { - if (isFailureStoreRollover) { - yield resolveDataStreamFailureStoreRolloverNames(currentState.metadata(), (DataStream) indexAbstraction); - } - yield resolveDataStreamRolloverNames(currentState.getMetadata(), (DataStream) indexAbstraction); - } + case DATA_STREAM -> resolveDataStreamRolloverNames( + currentState.metadata(), + (DataStream) indexAbstraction, + isFailureStoreRollover + ); default -> // the validate method above prevents this case throw new IllegalStateException("unable to roll over type [" + indexAbstraction.getType().getDisplayName() + "]"); @@ -220,19 +219,15 @@ private static NameResolution resolveAliasRolloverNames(Metadata metadata, Index return new NameResolution(sourceIndexName, unresolvedName, rolloverIndexName); } - private static NameResolution resolveDataStreamRolloverNames(Metadata metadata, DataStream dataStream) { - final IndexMetadata originalWriteIndex = metadata.index(dataStream.getWriteIndex()); - return new NameResolution(originalWriteIndex.getIndex().getName(), null, dataStream.nextWriteIndexAndGeneration(metadata).v1()); - } - - private static NameResolution resolveDataStreamFailureStoreRolloverNames(Metadata metadata, DataStream dataStream) { - assert dataStream.getFailureStoreWriteIndex() != null : "Unable to roll over failure store with no failure store indices"; + private static NameResolution resolveDataStreamRolloverNames(Metadata metadata, DataStream dataStream, boolean isFailureStoreRollover) { + final DataStream.DataStreamIndices dataStreamIndices = dataStream.getDataStreamIndices(isFailureStoreRollover); + assert dataStreamIndices.getWriteIndex() != null : "Unable to roll over dataStreamIndices with no indices"; - final IndexMetadata originalWriteIndex = metadata.index(dataStream.getFailureStoreWriteIndex()); + final IndexMetadata originalWriteIndex = metadata.index(dataStreamIndices.getWriteIndex()); return new NameResolution( originalWriteIndex.getIndex().getName(), null, - 
dataStream.nextFailureStoreWriteIndexAndGeneration(metadata).v1() + dataStream.nextWriteIndexAndGeneration(metadata, dataStreamIndices).v1() ); } @@ -327,10 +322,9 @@ private RolloverResult rolloverDataStream( templateV2 = systemDataStreamDescriptor.getComposableIndexTemplate(); } - final Index originalWriteIndex = isFailureStoreRollover ? dataStream.getFailureStoreWriteIndex() : dataStream.getWriteIndex(); - final Tuple nextIndexAndGeneration = isFailureStoreRollover - ? dataStream.nextFailureStoreWriteIndexAndGeneration(currentState.metadata()) - : dataStream.nextWriteIndexAndGeneration(currentState.metadata()); + final DataStream.DataStreamIndices dataStreamIndices = dataStream.getDataStreamIndices(isFailureStoreRollover); + final Index originalWriteIndex = dataStreamIndices.getWriteIndex(); + final Tuple nextIndexAndGeneration = dataStream.nextWriteIndexAndGeneration(metadata, dataStreamIndices); final String newWriteIndexName = nextIndexAndGeneration.v1(); final long newGeneration = nextIndexAndGeneration.v2(); MetadataCreateIndexService.validateIndexName(newWriteIndexName, currentState); // fails if the index already exists @@ -438,7 +432,7 @@ yield new DataStreamAutoShardingEvent( metadataBuilder = withShardSizeForecastForWriteIndex(dataStreamName, metadataBuilder); newState = ClusterState.builder(newState).metadata(metadataBuilder).build(); - newState = MetadataDataStreamsService.setRolloverOnWrite(newState, dataStreamName, false); + newState = MetadataDataStreamsService.setRolloverOnWrite(newState, dataStreamName, false, isFailureStoreRollover); return new RolloverResult(newWriteIndexName, originalWriteIndex.getName(), newState); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 28ef2f644af04..09f9411d5a834 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -110,7 +110,7 @@ public RolloverRequest(StreamInput in) throws IOException { dryRun = in.readBoolean(); conditions = new RolloverConditions(in); createIndexRequest = new CreateIndexRequest(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { lazy = in.readBoolean(); } else { lazy = false; @@ -120,9 +120,12 @@ public RolloverRequest(StreamInput in) throws IOException { } } - RolloverRequest() {} + RolloverRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public RolloverRequest(String rolloverTarget, String newIndexName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.rolloverTarget = rolloverTarget; this.newIndexName = newIndexName; } @@ -165,7 +168,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); conditions.writeTo(out); createIndexRequest.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(lazy); } if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java 
index 360ea59e6a299..04b9f6498a3a9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.rollover; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,8 +21,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.LAZY_ROLLOVER_ADDED; - /** * Response object for {@link RolloverRequest} API * @@ -59,7 +58,7 @@ public final class RolloverResponse extends ShardsAcknowledgedResponse implement dryRun = in.readBoolean(); rolledOver = in.readBoolean(); shardsAcknowledged = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(LAZY_ROLLOVER_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { lazy = in.readBoolean(); } else { lazy = false; @@ -142,7 +141,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); out.writeBoolean(rolledOver); out.writeBoolean(shardsAcknowledged); - if (out.getTransportVersion().onOrAfter(LAZY_ROLLOVER_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(lazy); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index b9ab28dc80e65..abf42cffdaa01 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -169,12 +169,13 @@ protected void masterOperation( assert task instanceof CancellableTask; Metadata metadata = clusterState.metadata(); // We evaluate the names of the index for which we should evaluate conditions, as well as what our newly created index *would* be. 
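
A recurring pattern in this change: per-feature transport-version constants (LAZY_ROLLOVER_ADDED, FIELD_CAPS_FIELD_HAS_VALUE, and so on) are collapsed into the released V_8_13_0 constant now that 8.13 has shipped. A minimal sketch of the gating idiom those call sites follow; ExampleRequest is hypothetical and not part of this change:

import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

public class ExampleRequest {
    private final boolean lazy;

    public ExampleRequest(StreamInput in) throws IOException {
        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
            lazy = in.readBoolean();   // the peer is new enough to have sent the field
        } else {
            lazy = false;              // default for older peers that never sent it
        }
    }

    public void writeTo(StreamOutput out) throws IOException {
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
            out.writeBoolean(lazy);    // only send the field to peers that can read it
        }
    }
}

Reader and writer must gate symmetrically on the same version; otherwise the two sides disagree about the stream layout.
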
+ boolean targetFailureStore = rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices(); final MetadataRolloverService.NameResolution trialRolloverNames = MetadataRolloverService.resolveRolloverNames( clusterState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), rolloverRequest.getCreateIndexRequest(), - rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() + targetFailureStore ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); @@ -200,6 +201,7 @@ protected void masterOperation( metadataDataStreamsService.setRolloverOnWrite( rolloverRequest.getRolloverTarget(), true, + targetFailureStore, rolloverRequest.ackTimeout(), rolloverRequest.masterNodeTimeout(), listener.map( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java index 96cbfc80c8d67..42ff256579984 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java @@ -51,7 +51,9 @@ public GetSettingsRequest includeDefaults(boolean includeDefaults) { return this; } - public GetSettingsRequest() {} + public GetSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetSettingsRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index 7fa2e11317a43..c3e87f2f54cf0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -61,12 +61,15 @@ public UpdateSettingsRequest(StreamInput in) throws IOException { } } - public UpdateSettingsRequest() {} + public UpdateSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new request to update settings for one or more indices */ public UpdateSettingsRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } @@ -74,6 +77,7 @@ public UpdateSettingsRequest(String... indices) { * Constructs a new request to update settings for one or more indices */ public UpdateSettingsRequest(Settings settings, String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; this.settings = settings; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java index 475c9c16f149e..8cf2427e91c15 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java @@ -40,10 +40,13 @@ public class IndicesShardStoresRequest extends MasterNodeReadRequestindices */ public IndicesShardStoresRequest(String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.indices = indices; } - public IndicesShardStoresRequest() {} + public IndicesShardStoresRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public IndicesShardStoresRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index c39d2e1114618..ef709fc4457a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -73,9 +73,12 @@ public ResizeRequest(StreamInput in) throws IOException { } } - ResizeRequest() {} + ResizeRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public ResizeRequest(String targetIndex, String sourceIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.targetIndexRequest = new CreateIndexRequest(targetIndex); this.sourceIndex = sourceIndex; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java index b3f3a0a203df5..3c2416200ce61 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java @@ -28,12 +28,15 @@ public DeleteIndexTemplateRequest(StreamInput in) throws IOException { name = in.readString(); } - public DeleteIndexTemplateRequest() {} + public DeleteIndexTemplateRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new delete index request for the specified name. */ public DeleteIndexTemplateRequest(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java index 593162305f2d0..9ac10d782a605 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java @@ -109,6 +109,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new delete index request for the specified name. */ public Request(String... 
names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "component templates to delete must not be null"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java index f884c8404d0f2..fa40a901c705b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java @@ -108,6 +108,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new delete template request for the specified name. */ public Request(String... names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "templates to delete must not be null"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 3d5b4a73e0a57..5483097b140da 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -48,9 +48,12 @@ public static class Request extends MasterNodeReadRequest { private String name; private boolean includeDefaults; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; this.includeDefaults = false; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index aebb9cef12f43..5cb35d23c8b7c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -49,6 +49,7 @@ public static class Request extends MasterNodeReadRequest { * @param name A template name or pattern, or {@code null} to retrieve all templates. 
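
The constructor changes across these template and settings request classes all follow one shape: the previously implicit 30-second master-node timeout is now passed up to the superclass explicitly, so every remaining use of the deprecated default is visible and searchable at the call site. A hedged sketch of a migrated subclass, assuming only the constructor signatures introduced in this diff; ExampleAcknowledgedRequest is hypothetical:

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;

public class ExampleAcknowledgedRequest extends AcknowledgedRequest<ExampleAcknowledgedRequest> {

    // the no-arg constructor keeps the legacy 30s behaviour, but the trappy default is
    // now named at the call site instead of hiding in a superclass field initializer
    public ExampleAcknowledgedRequest() {
        super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;   // nothing to validate in this sketch
    }
}
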
*/ public Request(@Nullable String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (name != null && name.contains(",")) { throw new IllegalArgumentException("template name may not contain ','"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java index ec7ce037e651c..19c89b0186733 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java @@ -25,6 +25,7 @@ public class GetIndexTemplatesRequest extends MasterNodeReadRequest { private TransportPutComposableIndexTemplateAction.Request indexTemplateRequest; private boolean includeDefaults = false; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String templateName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (templateName == null) { throw new IllegalArgumentException("template name cannot be null"); } @@ -53,6 +56,7 @@ public Request(String templateName) { } public Request(TransportPutComposableIndexTemplateAction.Request indexTemplateRequest) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (indexTemplateRequest == null) { throw new IllegalArgumentException("index template body must be present"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java index 56e7079ec38ba..ebf1e9e74b793 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java @@ -56,6 +56,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new put component template request with the provided name. */ public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 10c9a5e7205b0..6ef887847c270 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -92,12 +92,15 @@ public PutIndexTemplateRequest(StreamInput in) throws IOException { version = in.readOptionalVInt(); } - public PutIndexTemplateRequest() {} + public PutIndexTemplateRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new put index template request with the provided name. 
*/ public PutIndexTemplateRequest(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java index 8d259083a1352..86c6109469477 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java @@ -156,6 +156,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new put index template request with the provided name. */ public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index fcad07d0696f3..7356dc0ea140e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -298,6 +298,10 @@ private void executeBulkRequestsByShard( bulkRequest.getRefreshPolicy(), requests.toArray(new BulkItemRequest[0]) ); + var indexMetadata = clusterState.getMetadata().index(shardId.getIndexName()); + if (indexMetadata != null && indexMetadata.getInferenceFields().isEmpty() == false) { + bulkShardRequest.setInferenceFieldMap(indexMetadata.getInferenceFields()); + } bulkShardRequest.waitForActiveShards(bulkRequest.waitForActiveShards()); bulkShardRequest.timeout(bulkRequest.timeout()); bulkShardRequest.routedBasedOnClusterVersion(clusterState.version()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index bd929b9a2204e..85b7fc03ff667 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; @@ -22,6 +23,7 @@ import org.elasticsearch.transport.RawIndexingDataTransportRequest; import java.io.IOException; +import java.util.Map; import java.util.Set; public final class BulkShardRequest extends ReplicatedWriteRequest @@ -33,6 +35,8 @@ public final class BulkShardRequest extends ReplicatedWriteRequest inferenceFieldMap = null; + public BulkShardRequest(StreamInput in) throws IOException { super(in); items = in.readArray(i -> i.readOptionalWriteable(inpt -> new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new); @@ -44,6 +48,30 @@ public BulkShardRequest(ShardId shardId, RefreshPolicy refreshPolicy, BulkItemRe setRefreshPolicy(refreshPolicy); } + /** + * Public for test + * Set the transient metadata indicating that this request requires running inference before proceeding. 
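
The inference plumbing threaded through BulkOperation and BulkShardRequest in the hunks around here follows a set-then-consume-once lifecycle: the coordinating node attaches the field map, a downstream filter consumes it exactly once, and, as the writeTo guard just below shows, serialization fails loudly if the transient state was never consumed. A self-contained sketch of that lifecycle; TransientCarrier is a hypothetical stand-in, not ES API:

import java.util.Map;

public class TransientCarrier {
    private Map<String, Object> transientMetadata;   // null once consumed

    // attached by coordinating code before the request is dispatched
    public void setTransientMetadata(Map<String, Object> metadata) {
        this.transientMetadata = metadata;
    }

    // returns the metadata and clears it, so it can only be acted on once
    public Map<String, Object> consumeTransientMetadata() {
        Map<String, Object> ret = transientMetadata;
        transientMetadata = null;
        return ret;
    }

    // serialization guard: transient state must never go over the wire
    public void writeTo() {
        if (transientMetadata != null) {
            throw new IllegalStateException("transient metadata should have been consumed before writing");
        }
        // ... write the durable fields ...
    }
}
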
+ */ + public void setInferenceFieldMap(Map fieldInferenceMap) { + this.inferenceFieldMap = fieldInferenceMap; + } + + /** + * Consumes the inference metadata to execute inference on the bulk items just once. + */ + public Map consumeInferenceFieldMap() { + Map ret = inferenceFieldMap; + inferenceFieldMap = null; + return ret; + } + + /** + * Public for test + */ + public Map getInferenceFieldMap() { + return inferenceFieldMap; + } + public long totalSizeInBytes() { long totalSizeInBytes = 0; for (int i = 0; i < items.length; i++) { @@ -85,6 +113,10 @@ public String[] indices() { @Override public void writeTo(StreamOutput out) throws IOException { + if (inferenceFieldMap != null) { + // Inferencing metadata should have been consumed as part of the ShardBulkInferenceActionFilter processing + throw new IllegalStateException("Inference metadata should have been consumed before writing to the stream"); + } super.writeTo(out); out.writeArray((o, item) -> { if (item != null) { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java index f9e559fa16ec7..40060d5e5d927 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java @@ -36,11 +36,13 @@ public static class Request extends AcknowledgedRequest implements Indi private final long startTime; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.startTime = System.currentTimeMillis(); } public Request(String name, long startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.startTime = startTime; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java index b68a7d3fcd159..5b79eae0cebfd 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java @@ -47,6 +47,7 @@ public static class Request extends MasterNodeRequest implements Indice private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); public Request(String... 
names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names); this.wildcardExpressionsOriginallySpecified = Arrays.stream(names).anyMatch(Regex::isSimpleMatchPattern); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 1517b368e21ea..841a2df5eada6 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -58,10 +58,12 @@ public static class Request extends MasterNodeReadRequest implements In private boolean includeDefaults = false; public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; } public Request(String[] names, boolean includeDefaults) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; this.includeDefaults = includeDefaults; } @@ -187,6 +189,7 @@ public static class DataStreamInfo implements SimpleDiffable, To public static final ParseField TIME_SINCE_LAST_AUTO_SHARD_EVENT_MILLIS = new ParseField( "time_since_last_auto_shard_event_millis" ); + public static final ParseField FAILURE_STORE_ENABLED = new ParseField("enabled"); private final DataStream dataStream; private final ClusterHealthStatus dataStreamStatus; @@ -220,7 +223,7 @@ public DataStreamInfo( @SuppressWarnings("unchecked") DataStreamInfo(StreamInput in) throws IOException { this( - new DataStream(in), + DataStream.read(in), ClusterHealthStatus.readFrom(in), in.readOptionalString(), in.readOptionalString(), @@ -298,45 +301,8 @@ public XContentBuilder toXContent( .field(DataStream.NAME_FIELD.getPreferredName(), DataStream.TIMESTAMP_FIELD_NAME) .endObject(); - builder.field(DataStream.INDICES_FIELD.getPreferredName()); - if (dataStream.getIndices() == null) { - builder.nullValue(); - } else { - builder.startArray(); - for (Index index : dataStream.getIndices()) { - builder.startObject(); - index.toXContentFragment(builder); - IndexProperties indexProperties = indexSettingsValues.get(index); - if (indexProperties != null) { - builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); - if (indexProperties.ilmPolicyName() != null) { - builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); - } - builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); - } - builder.endObject(); - } - builder.endArray(); - } + indicesToXContent(builder, dataStream.getIndices()); builder.field(DataStream.GENERATION_FIELD.getPreferredName(), dataStream.getGeneration()); - if (DataStream.isFailureStoreFeatureFlagEnabled()) { - builder.field(DataStream.FAILURE_INDICES_FIELD.getPreferredName()); - builder.startArray(); - for (Index failureStore : dataStream.getFailureIndices()) { - builder.startObject(); - failureStore.toXContentFragment(builder); - IndexProperties indexProperties = indexSettingsValues.get(failureStore); - if (indexProperties != null) { - builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); - if (indexProperties.ilmPolicyName() != null) { - builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); - } - builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); - } - builder.endObject(); - } - builder.endArray(); - } if (dataStream.getMetadata() != null) { 
builder.field(DataStream.METADATA_FIELD.getPreferredName(), dataStream.getMetadata()); } @@ -359,20 +325,7 @@ public XContentBuilder toXContent( builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), dataStream.isAllowCustomRouting()); builder.field(REPLICATED.getPreferredName(), dataStream.isReplicated()); builder.field(ROLLOVER_ON_WRITE.getPreferredName(), dataStream.rolloverOnWrite()); - if (DataStream.isFailureStoreFeatureFlagEnabled()) { - builder.field(DataStream.FAILURE_STORE_FIELD.getPreferredName(), dataStream.isFailureStoreEnabled()); - } - if (dataStream.getAutoShardingEvent() != null) { - DataStreamAutoShardingEvent autoShardingEvent = dataStream.getAutoShardingEvent(); - builder.startObject(AUTO_SHARDING_FIELD.getPreferredName()); - autoShardingEvent.toXContent(builder, params); - builder.humanReadableField( - TIME_SINCE_LAST_AUTO_SHARD_EVENT_MILLIS.getPreferredName(), - TIME_SINCE_LAST_AUTO_SHARD_EVENT.getPreferredName(), - autoShardingEvent.getTimeSinceLastAutoShardingEvent(System::currentTimeMillis) - ); - builder.endObject(); - } + addAutoShardingEvent(builder, params, dataStream.getAutoShardingEvent()); if (timeSeries != null) { builder.startObject(TIME_SERIES.getPreferredName()); builder.startArray(TEMPORAL_RANGES.getPreferredName()); @@ -387,10 +340,56 @@ public XContentBuilder toXContent( builder.endArray(); builder.endObject(); } + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + builder.startObject(DataStream.FAILURE_STORE_FIELD.getPreferredName()); + builder.field(FAILURE_STORE_ENABLED.getPreferredName(), dataStream.isFailureStoreEnabled()); + builder.field( + DataStream.ROLLOVER_ON_WRITE_FIELD.getPreferredName(), + dataStream.getFailureIndices().isRolloverOnWrite() + ); + indicesToXContent(builder, dataStream.getFailureIndices().getIndices()); + addAutoShardingEvent(builder, params, dataStream.getFailureIndices().getAutoShardingEvent()); + builder.endObject(); + } builder.endObject(); return builder; } + private XContentBuilder indicesToXContent(XContentBuilder builder, List indices) throws IOException { + builder.field(DataStream.INDICES_FIELD.getPreferredName()); + builder.startArray(); + for (Index index : indices) { + builder.startObject(); + index.toXContentFragment(builder); + IndexProperties indexProperties = indexSettingsValues.get(index); + if (indexProperties != null) { + builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); + if (indexProperties.ilmPolicyName() != null) { + builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); + } + builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); + } + builder.endObject(); + } + builder.endArray(); + return builder; + } + + private void addAutoShardingEvent(XContentBuilder builder, Params params, DataStreamAutoShardingEvent autoShardingEvent) + throws IOException { + if (autoShardingEvent == null) { + return; + } + builder.startObject(AUTO_SHARDING_FIELD.getPreferredName()); + autoShardingEvent.toXContent(builder, params); + builder.humanReadableField( + TIME_SINCE_LAST_AUTO_SHARD_EVENT_MILLIS.getPreferredName(), + TIME_SINCE_LAST_AUTO_SHARD_EVENT.getPreferredName(), + autoShardingEvent.getTimeSinceLastAutoShardingEvent(System::currentTimeMillis) + ); + builder.endObject(); + } + /** * Computes and returns which system will manage the next generation for this data stream. 
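
After the refactor above, all failure-store state renders under a single failure_store object rather than as scattered top-level fields. A rough stand-alone sketch of the resulting shape, using the field names from the constants referenced above; the inner index fields are illustrative only:

import org.elasticsearch.common.Strings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

import java.io.IOException;

public class FailureStoreShapeSketch {
    public static void main(String[] args) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
        builder.startObject();
        builder.startObject("failure_store");         // DataStream.FAILURE_STORE_FIELD
        builder.field("enabled", true);               // FAILURE_STORE_ENABLED
        builder.field("rollover_on_write", false);    // DataStream.ROLLOVER_ON_WRITE_FIELD
        builder.startArray("indices");                // rendered via indicesToXContent
        builder.startObject();
        builder.field("index_name", ".fs-my-stream-000001");   // illustrative
        builder.endObject();
        builder.endArray();
        builder.endObject();
        builder.endObject();
        System.out.println(Strings.toString(builder));
    }
}
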
*/ diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java index 3a834273e84cf..226b8d44f636c 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest actions) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.actions = Collections.unmodifiableList(actions); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java index 3b3e644272cbc..0853d30d22de4 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java @@ -35,6 +35,7 @@ public static class Request extends MasterNodeRequest implements In private boolean includeDefaults = false; public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; } public Request(String[] names, boolean includeDefaults) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; this.includeDefaults = includeDefaults; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java index 8156e03b0cdd1..7bb63ae27b526 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java @@ -95,6 +95,7 @@ public Request(String[] names, @Nullable TimeValue dataRetention) { } public Request(String[] names, DataStreamLifecycle lifecycle) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; this.lifecycle = lifecycle; } @@ -104,6 +105,7 @@ public Request(String[] names, @Nullable TimeValue dataRetention, @Nullable Bool } public Request(String[] names, @Nullable TimeValue dataRetention, @Nullable Boolean enabled, @Nullable Downsampling downsampling) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; this.lifecycle = DataStreamLifecycle.newBuilder() .dataRetention(dataRetention) diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index e8e299c58d2eb..7d2b1be79731e 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -50,13 +50,16 @@ public Request( final TimeValue waitTimeout, final DownsampleConfig downsampleConfig ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.sourceIndex = sourceIndex; this.targetIndex = targetIndex; this.waitTimeout = waitTimeout == null ? 
DEFAULT_WAIT_TIMEOUT : waitTimeout; this.downsampleConfig = downsampleConfig; } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java index da56e20f4e6a4..6c1734bde401f 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java @@ -56,7 +56,7 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readLong(); runtimeFields = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { includeEmptyFields = in.readBoolean(); } else { includeEmptyFields = true; @@ -144,7 +144,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalNamedWriteable(indexFilter); out.writeLong(nowInMillis); out.writeGenericMap(runtimeFields); - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(includeEmptyFields); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index 4b1c256bdeb71..6fab92219511d 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -63,7 +63,7 @@ public FieldCapabilitiesRequest(StreamInput in) throws IOException { filters = in.readStringArray(); types = in.readStringArray(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { includeEmptyFields = in.readBoolean(); } } @@ -104,7 +104,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(filters); out.writeStringArray(types); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(includeEmptyFields); } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 9d0eeb20dacef..794a3f38b56bb 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -52,7 +52,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.INDEX_REQUEST_NORMALIZED_BYTES_PARSED; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -189,7 +188,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio dynamicTemplates = in.readMap(StreamInput::readString); } 
if (in.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) - && in.getTransportVersion().before(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { + && in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readBoolean(); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { @@ -201,14 +200,12 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio : new ArrayList<>(possiblyImmutableExecutedPipelines); } } - if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); + normalisedBytesParsed = in.readZLong(); } else { requireDataStream = false; } - if (in.getTransportVersion().onOrAfter(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { - normalisedBytesParsed = in.readZLong(); - } } public IndexRequest() { @@ -772,7 +769,7 @@ private void writeBody(StreamOutput out) throws IOException { } } if (out.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) - && out.getTransportVersion().before(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { + && out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeBoolean(normalisedBytesParsed != -1L); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { @@ -782,10 +779,8 @@ private void writeBody(StreamOutput out) throws IOException { } } - if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(requireDataStream); - } - if (out.getTransportVersion().onOrAfter(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { out.writeZLong(normalisedBytesParsed); } } @@ -867,12 +862,12 @@ public Index getConcreteWriteIndex(IndexAbstraction ia, Metadata metadata) { // Resolve write index and get parent data stream to handle the case of dealing with an alias String defaultWriteIndexName = ia.getWriteIndex().getName(); DataStream dataStream = metadata.getIndicesLookup().get(defaultWriteIndexName).getParentDataStream(); - if (dataStream.getFailureIndices().size() < 1) { + if (dataStream.getFailureIndices().getIndices().size() < 1) { throw new ElasticsearchException( "Attempting to write a document to a failure store but the target data stream does not have one enabled" ); } - return dataStream.getFailureIndices().get(dataStream.getFailureIndices().size() - 1); + return dataStream.getFailureIndices().getIndices().get(dataStream.getFailureIndices().getIndices().size() - 1); } else { // Resolve as normal return ia.getWriteIndex(this, metadata); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index 3810d95872417..4ac4d63ba5de0 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -20,6 +20,7 @@ public class DeletePipelineRequest extends AcknowledgedRequest * Create a new pipeline request with the id and source along with the content type of the source */ public PutPipelineRequest(String id, BytesReference source, XContentType xContentType, Integer version) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.id = Objects.requireNonNull(id); this.source = Objects.requireNonNull(source); this.xContentType = Objects.requireNonNull(xContentType); diff --git 
a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 2f307d653f8a4..0db9f3d20d117 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.shard.ShardId; @@ -374,7 +375,17 @@ protected abstract void executePhaseOnShard( protected void fork(final Runnable runnable) { executor.execute(new AbstractRunnable() { @Override - public void onFailure(Exception e) {} + public void onFailure(Exception e) { + logger.error(() -> "unexpected error during [" + task + "]", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + // avoid leaks during node shutdown by executing on the current thread if the executor shuts down + assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e; + doRun(); + } @Override protected void doRun() { diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 48c2f1890ba08..e8470ba77632f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -101,6 +101,7 @@ private void doRun() { hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); } hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); + assert innerHits.isPooled() == false || hit.isPooled() : "pooled inner hits can only be added to a pooled hit"; innerHits.mustIncRef(); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java index a18d2c6418542..767597625edc6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java @@ -22,7 +22,6 @@ public final class RankFeaturePhase extends SearchPhase { private final SearchPhaseContext context; private final SearchPhaseResults queryPhaseResults; - private final SearchPhaseResults rankPhaseResults; private final AggregatedDfs aggregatedDfs; @@ -39,8 +38,6 @@ public final class RankFeaturePhase extends SearchPhase { this.context = context; this.queryPhaseResults = queryPhaseResults; this.aggregatedDfs = aggregatedDfs; - this.rankPhaseResults = new ArraySearchPhaseResults<>(context.getNumShards()); - context.addReleasable(rankPhaseResults); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 51a8c6ddb3d76..a12d149bbe342 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -1303,8 +1303,8 @@ public SearchPhase newSearchPhase( task, true, 
searchService.getCoordinatorRewriteContextProvider(timeProvider::absoluteStartMillis), - listener.delegateFailureAndWrap((l, iters) -> { - SearchPhase action = newSearchPhase( + listener.delegateFailureAndWrap( + (l, iters) -> newSearchPhase( task, searchRequest, executor, @@ -1317,30 +1317,32 @@ public SearchPhase newSearchPhase( false, threadPool, clusters - ); - action.start(); - }) - ); - } else { - // for synchronous CCS minimize_roundtrips=false, use the CCSSingleCoordinatorSearchProgressListener - // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener - // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) - if (clusters.isCcsMinimizeRoundtrips() == false - && clusters.hasRemoteClusters() - && task.getProgressListener() == SearchProgressListener.NOOP) { - task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); - } - final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( - executor, - circuitBreaker, - task::isCancelled, - task.getProgressListener(), - searchRequest, - shardIterators.size(), - exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ).start() + ) ); + } + // for synchronous CCS minimize_roundtrips=false, use the CCSSingleCoordinatorSearchProgressListener + // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener + // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) + if (clusters.isCcsMinimizeRoundtrips() == false + && clusters.hasRemoteClusters() + && task.getProgressListener() == SearchProgressListener.NOOP) { + task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); + } + final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( + executor, + circuitBreaker, + task::isCancelled, + task.getProgressListener(), + searchRequest, + shardIterators.size(), + exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ); + boolean success = false; + try { + final SearchPhase searchPhase; if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { - return new SearchDfsQueryThenFetchAsyncAction( + searchPhase = new SearchDfsQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1359,7 +1361,7 @@ public SearchPhase newSearchPhase( ); } else { assert searchRequest.searchType() == QUERY_THEN_FETCH : searchRequest.searchType(); - return new SearchQueryThenFetchAsyncAction( + searchPhase = new SearchQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1377,6 +1379,12 @@ public SearchPhase newSearchPhase( clusters ); } + success = true; + return searchPhase; + } finally { + if (success == false) { + queryResultConsumer.close(); + } } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index 7e271536be9fe..b6389d0b112b6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -34,17 +34,21 @@ public abstract class AcknowledgedRequest + * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine 
the timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. + * + * @param ackTimeout specifies how long to wait for all relevant nodes to apply a cluster state update and acknowledge this to + * the elected master. */ - protected AcknowledgedRequest(TimeValue ackTimeout) { + protected AcknowledgedRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) { + super(masterNodeTimeout); this.ackTimeout = Objects.requireNonNull(ackTimeout); } @@ -94,6 +98,8 @@ public Plain(StreamInput in) throws IOException { super(in); } - public Plain() {} + public Plain() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java index 7f4100473c42c..92788f53279d5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import java.io.IOException; @@ -20,7 +21,20 @@ public abstract class MasterNodeReadRequest + * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. + */ + protected MasterNodeReadRequest(TimeValue masterNodeTimeout) { + super(masterNodeTimeout); + } protected MasterNodeReadRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index 063dbb0397de8..1b3dca31689e2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -21,15 +21,36 @@ */ public abstract class MasterNodeRequest> extends ActionRequest { - public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); - - private TimeValue masterNodeTimeout = DEFAULT_MASTER_NODE_TIMEOUT; + /** + * The default timeout for master-node requests. It's super-trappy to have such a default, because it makes it all too easy to forget + * to add a mechanism by which clients can change it. Without such a mechanism things will work fine until we encounter a large cluster + * that is struggling to process cluster state updates fast enough, and it's a disaster if we cannot extend the master-node timeout in + * those cases. We shouldn't use this any more and should work towards removing it. + *
<p>
+ * For requests which originate in the REST layer, use {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the + * timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link TimeValue#MAX_VALUE} (or {@link + * TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) since usually we want internal requests to wait for as long + * as necessary to complete. + * + * @deprecated all requests should specify a timeout, see #107984. + */ + @Deprecated(forRemoval = true) + public static final TimeValue TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); - protected MasterNodeRequest() {} + private TimeValue masterNodeTimeout; /** * @param masterNodeTimeout Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy - * processing other tasks. The value {@link TimeValue#MINUS_ONE} means to wait forever. + * processing other tasks. The value {@link TimeValue#MINUS_ONE} means to wait forever in 8.15.0 onwards. + *
<p>
+ * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. */ protected MasterNodeRequest(TimeValue masterNodeTimeout) { this.masterNodeTimeout = Objects.requireNonNull(masterNodeTimeout); @@ -49,7 +70,14 @@ public void writeTo(StreamOutput out) throws IOException { /** * Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy processing other tasks. The - * value {@link TimeValue#MINUS_ONE} means to wait forever. + * value {@link TimeValue#MINUS_ONE} means to wait forever in 8.15.0 onwards. + *
<p>
+ * For requests which originate in the REST layer, use {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the + * timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link TimeValue#MAX_VALUE} (or {@link + * TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) since usually we want internal requests to wait for as long + * as necessary to complete. */ @SuppressWarnings("unchecked") public final Request masterNodeTimeout(TimeValue timeout) { diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 0dbe66822d311..e88ebbdc07688 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; @@ -322,11 +323,25 @@ public void onTimeout(TimeValue timeout) { logger.debug(() -> format("timed out while retrying [%s] after failure (timeout [%s])", actionName, timeout), failure); listener.onFailure(new MasterNotDiscoveredException(failure)); } + + @Override + public String toString() { + return Strings.format( + "listener for [%s] retrying after cluster state version [%d]", + AsyncSingleAction.this, + currentStateVersion + ); + } }, clusterState -> isTaskCancelled() || statePredicate.test(clusterState)); } private boolean isTaskCancelled() { - return task instanceof CancellableTask && ((CancellableTask) task).isCancelled(); + return task instanceof CancellableTask cancellableTask && cancellableTask.isCancelled(); + } + + @Override + public String toString() { + return Strings.format("execution of [%s]", task); } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 00384852d1472..94ba504c8b175 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -26,10 +26,13 @@ public abstract class ClusterInfoRequest readNodeIdsForRemoval(StreamInput in) throws IOException { - return in.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED) + return in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readCollectionAsImmutableSet(StreamInput::readString) : Set.of(); } @@ -246,7 +244,7 @@ public void writeTo(StreamOutput out) throws IOException { while (iterator.hasNext()) { iterator.next().writeTo(out); } - if (out.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeStringCollection(nodesIdsForRemoval); } else { assert nodesIdsForRemoval.isEmpty() : nodesIdsForRemoval; @@ -433,7 +431,7 @@ private static boolean assertShardStateConsistent( * running shard snapshots. 
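
One small cleanup above: isTaskCancelled swaps an instanceof-and-cast for Java 16+ pattern matching, where the pattern variable binds only if the type test succeeds. A minimal stand-alone illustration; the Task and CancellableTask interfaces here are stand-ins, not the ES classes:

public class PatternMatchSketch {
    interface Task {}
    interface CancellableTask extends Task {
        boolean isCancelled();
    }

    // before: type test followed by an explicit cast
    static boolean isTaskCancelledOld(Task task) {
        return task instanceof CancellableTask && ((CancellableTask) task).isCancelled();
    }

    // after: 'cancellableTask' is in scope only when the type test succeeds
    static boolean isTaskCancelledNew(Task task) {
        return task instanceof CancellableTask cancellableTask && cancellableTask.isCancelled();
    }
}
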
*/ public SnapshotsInProgress withUpdatedNodeIdsForRemoval(ClusterState clusterState) { - assert clusterState.getMinTransportVersion().onOrAfter(TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED); + assert clusterState.getMinTransportVersion().onOrAfter(TransportVersions.V_8_13_0); final var updatedNodeIdsForRemoval = new HashSet<>(nodesIdsForRemoval); @@ -1709,7 +1707,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { new SimpleDiffable.CompleteDiff<>(after).writeTo(out); } - if (out.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeStringCollection(nodeIdsForRemoval); } else { assert nodeIdsForRemoval.isEmpty() : nodeIdsForRemoval; diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index c2cd403836593..b46b79754be7a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -43,9 +43,16 @@ import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.monitor.StatusInfo.Status.UNHEALTHY; +/** + * Handles periodic debug logging of information regarding why the cluster has failed to form. + * Periodic logging begins once {@link #start()} is called, and ceases on {@link #stop()}. + */ public class ClusterFormationFailureHelper { private static final Logger logger = LogManager.getLogger(ClusterFormationFailureHelper.class); + /** + * This time period controls how often warning log messages will be written if this node fails to join or form a cluster. + */ public static final Setting DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING = Setting.timeSetting( "discovery.cluster_formation_warning_timeout", TimeValue.timeValueMillis(10000), @@ -61,6 +68,16 @@ public class ClusterFormationFailureHelper { @Nullable // if no warning is scheduled private volatile WarningScheduler warningScheduler; + /** + * Works with the {@link JoinHelper} to log the latest node-join attempt failure and cluster state debug information. Must call + * {@link ClusterFormationState#start()} to begin. + * + * @param settings provides the period in which to log cluster formation errors. + * @param clusterFormationStateSupplier information about the current believed cluster state (See {@link ClusterFormationState}) + * @param threadPool the thread pool on which to run debug logging + * @param logLastFailedJoinAttempt invokes an instance of the JoinHelper to log the last encountered join failure + * (See {@link JoinHelper#logLastFailedJoinAttempt()}) + */ public ClusterFormationFailureHelper( Settings settings, Supplier clusterFormationStateSupplier, @@ -78,6 +95,10 @@ public boolean isRunning() { return warningScheduler != null; } + /** + * Schedules a warning debug message to be logged in 'clusterFormationWarningTimeout' time, and periodically thereafter, until + * {@link ClusterFormationState#stop()} has been called. 
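
The start/stop contract documented here amounts to scheduling a repeating warning task and cancelling it on stop. A hedged sketch of that pattern using a plain ScheduledExecutorService in place of the ES ThreadPool; all names are illustrative:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class FormationWarningSketch {
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private volatile ScheduledFuture<?> scheduled;   // null while stopped

    // begin periodic warning logging; the first warning fires only after the timeout elapses
    public void start(long warningIntervalMillis, Runnable logWarning) {
        scheduled = scheduler.scheduleWithFixedDelay(
            logWarning,
            warningIntervalMillis,
            warningIntervalMillis,
            TimeUnit.MILLISECONDS
        );
    }

    // cease periodic logging, e.g. once the node has joined a cluster
    public void stop() {
        ScheduledFuture<?> future = scheduled;
        if (future != null) {
            future.cancel(false);
            scheduled = null;
        }
    }
}
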
+ */ public void start() { assert warningScheduler == null; warningScheduler = new WarningScheduler(); @@ -129,7 +150,7 @@ public String toString() { } /** - * If this node believes that cluster formation has failed, this record provides information that can be used to determine why that is. + * This record provides node state information that can be used to determine why cluster formation has failed. */ public record ClusterFormationState( List initialMasterNodesSetting, @@ -220,7 +241,7 @@ public ClusterFormationState(StreamInput in) throws IOException { new VotingConfiguration(in), in.readCollectionAsImmutableList(TransportAddress::new), in.readCollectionAsImmutableList(DiscoveryNode::new), - in.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readCollectionAsImmutableSet(DiscoveryNode::new) : Set.of(), in.readLong(), @@ -402,7 +423,7 @@ public void writeTo(StreamOutput out) throws IOException { lastCommittedConfiguration.writeTo(out); out.writeCollection(resolvedAddresses); out.writeCollection(foundPeers); - if (out.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeCollection(mastersOfPeers); } out.writeLong(currentTerm); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 156ba88a7d2b1..daff05f0fb19b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -1781,7 +1781,7 @@ public void run() { final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); if (nodeEligibility.mayWin() == false) { assert nodeEligibility.reason().isEmpty() == false; - logger.trace( + logger.info( "skip prevoting as local node may not win election ({}): {}", nodeEligibility.reason(), lastAcceptedState.coordinationMetadata() diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index b960bb02ceb7f..059400ad81cfb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -194,13 +194,23 @@ private void unregisterAndReleaseConnection(DiscoveryNode destination, Releasabl Releasables.close(connectionReference); } - // package-private for testing + /** + * Saves information about a join failure. The failure information may be logged later via either {@link FailedJoinAttempt#logNow} + * or {@link FailedJoinAttempt#lastFailedJoinAttempt}. + * + * Package-private for testing. + */ static class FailedJoinAttempt { private final DiscoveryNode destination; private final JoinRequest joinRequest; private final ElasticsearchException exception; private final long timestamp; + /** + * @param destination the master node targeted by the join request. + * @param joinRequest the join request that was sent to the perceived master node. + * @param exception the error response received in reply to the join request attempt. 
+ */ FailedJoinAttempt(DiscoveryNode destination, JoinRequest joinRequest, ElasticsearchException exception) { this.destination = destination; this.joinRequest = joinRequest; @@ -208,10 +218,18 @@ static class FailedJoinAttempt { this.timestamp = System.nanoTime(); } + /** + * Logs the failed join attempt exception. + * {@link FailedJoinAttempt#getLogLevel(ElasticsearchException)} determines the log level at which the message is written. + */ void logNow() { logger.log(getLogLevel(exception), () -> format("failed to join %s with %s", destination, joinRequest), exception); } + /** + * Returns the appropriate log level based on the given exception. Expected failures, such as NotMasterException and + * CircuitBreakingException, are logged at DEBUG; unexpected errors are logged at INFO. + */ static Level getLogLevel(ElasticsearchException e) { Throwable cause = e.unwrapCause(); if (cause instanceof CoordinationStateRejectedException @@ -226,6 +244,10 @@ void logWarnWithTimestamp() { logger.warn( () -> format( "last failed join attempt was %s ago, failed to join %s with %s", + // 'timestamp' is when this error exception was received by the local node. If a long time has passed since the error + // was originally received, it could indicate a stale error from some prior out-of-order request/response exchange + // (where a request sent later, but answered earlier, was successful); alternatively, an old error could indicate + // that this node has not retried the join request for a very long time. TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - timestamp)), destination, joinRequest @@ -235,6 +257,9 @@ } } + /** + * Logs a warning message if {@link #lastFailedJoinAttempt} has been set with a failure.
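The level selection reads as: expected coordination-time failures stay quiet at DEBUG, anything surprising is promoted to INFO. A condensed sketch (the instanceof chain is abbreviated to the examples the javadoc names):

    static Level getLogLevel(ElasticsearchException e) {
        Throwable cause = e.unwrapCause();
        // expected, benign failures while a master is still being established
        return cause instanceof CoordinationStateRejectedException
            || cause instanceof NotMasterException
            || cause instanceof CircuitBreakingException ? Level.DEBUG : Level.INFO;
    }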
+ */ void logLastFailedJoinAttempt() { FailedJoinAttempt attempt = lastFailedJoinAttempt.get(); if (attempt != null) { @@ -247,7 +272,7 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional assert destination.isMasterNode() : "trying to join master-ineligible " + destination; final StatusInfo statusInfo = nodeHealthService.getHealth(); if (statusInfo.getStatus() == UNHEALTHY) { - logger.debug("dropping join request to [{}]: [{}]", destination, statusInfo.getInfo()); + logger.debug("dropping join request to [{}], unhealthy status: [{}]", destination, statusInfo.getInfo()); return; } final JoinRequest joinRequest = new JoinRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java index 19113bc770000..6e0e7d8dda5a5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java @@ -8,6 +8,7 @@ package org.elasticsearch.cluster.coordination; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -19,7 +20,14 @@ public record JoinStatus(DiscoveryNode remoteNode, long term, String message, TimeValue age) implements Writeable { public JoinStatus(StreamInput in) throws IOException { - this(new DiscoveryNode(in), in.readLong(), in.readString(), new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString()))); + this( + new DiscoveryNode(in), + in.readLong(), + in.readString(), + in.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION) + ? in.readTimeValue() + : new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString())) + ); } @Override @@ -27,7 +35,11 @@ public void writeTo(StreamOutput out) throws IOException { remoteNode.writeTo(out); out.writeLong(term); out.writeString(message); - out.writeLong(age.duration()); - out.writeString(age.timeUnit().name()); + if (out.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION)) { + out.writeTimeValue(age); + } else { + out.writeLong(age.duration()); + out.writeString(age.timeUnit().name()); + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 2c024063e2399..9223e02fc946c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -123,7 +124,14 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex newState = ClusterState.builder(initialState); } else { logger.trace("processing node joins, but we are not the master. 
current master: {}", currentNodes.getMasterNode()); - throw new NotMasterException("Node [" + currentNodes.getLocalNode() + "] not master for join request"); + throw new NotMasterException( + Strings.format( + "Node [%s] not master for join request. Current known master [%s], current term [%d]", + currentNodes.getLocalNode(), + currentNodes.getMasterNode(), + term + ) + ); } DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(newState.nodes()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 16ad072f271ff..a67552c30d5a5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling.Round; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -52,7 +51,6 @@ import java.util.Collection; import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -102,7 +100,6 @@ public static boolean isFailureStoreFeatureFlagEnabled() { private final LongSupplier timeProvider; private final String name; - private final List indices; private final long generation; @Nullable private final Map metadata; @@ -114,12 +111,10 @@ public static boolean isFailureStoreFeatureFlagEnabled() { private final IndexMode indexMode; @Nullable private final DataStreamLifecycle lifecycle; - private final boolean rolloverOnWrite; private final boolean failureStoreEnabled; - private final List failureIndices; - private volatile Set failureStoreLookup; - @Nullable - private final DataStreamAutoShardingEvent autoShardingEvent; + + private final DataStreamIndices backingIndices; + private final DataStreamIndices failureIndices; public DataStream( String name, @@ -139,7 +134,6 @@ public DataStream( ) { this( name, - indices, generation, metadata, hidden, @@ -150,16 +144,14 @@ public DataStream( indexMode, lifecycle, failureStoreEnabled, - failureIndices, - rolloverOnWrite, - autoShardingEvent + new DataStreamIndices(BACKING_INDEX_PREFIX, List.copyOf(indices), rolloverOnWrite, autoShardingEvent), + new DataStreamIndices(FAILURE_STORE_PREFIX, List.copyOf(failureIndices), false, null) ); } // visible for testing DataStream( String name, - List indices, long generation, Map metadata, boolean hidden, @@ -170,13 +162,10 @@ public DataStream( IndexMode indexMode, DataStreamLifecycle lifecycle, boolean failureStoreEnabled, - List failureIndices, - boolean rolloverOnWrite, - @Nullable DataStreamAutoShardingEvent autoShardingEvent + DataStreamIndices backingIndices, + DataStreamIndices failureIndices ) { this.name = name; - this.indices = List.copyOf(indices); - assert indices.isEmpty() == false; this.generation = generation; this.metadata = metadata; assert system == false || hidden; // system indices must be hidden @@ -188,21 +177,11 @@ public DataStream( this.indexMode = indexMode; this.lifecycle = lifecycle; this.failureStoreEnabled = failureStoreEnabled; + assert backingIndices.indices.isEmpty() == false; + assert replicated == false || (backingIndices.rolloverOnWrite == false && 
failureIndices.rolloverOnWrite == false) + : "replicated data streams cannot be marked for lazy rollover"; + this.backingIndices = backingIndices; this.failureIndices = failureIndices; - assert assertConsistent(this.indices); - assert replicated == false || rolloverOnWrite == false : "replicated data streams cannot be marked for lazy rollover"; - this.rolloverOnWrite = rolloverOnWrite; - this.autoShardingEvent = autoShardingEvent; - } - - private static boolean assertConsistent(List indices) { - assert indices.size() > 0; - final Set indexNames = new HashSet<>(); - for (Index index : indices) { - final boolean added = indexNames.add(index.getName()); - assert added : "found duplicate index entries in " + indices; - } - return true; } @Override @@ -222,20 +201,16 @@ public boolean isDataStreamRelated() { @Override public List getIndices() { - return indices; + return backingIndices.indices; } public long getGeneration() { return generation; } - public List getFailureIndices() { - return failureIndices; - } - @Override public Index getWriteIndex() { - return indices.get(indices.size() - 1); + return backingIndices.getWriteIndex(); } /** @@ -243,29 +218,18 @@ public Index getWriteIndex() { */ @Nullable public Index getFailureStoreWriteIndex() { - return isFailureStoreEnabled() == false || failureIndices.isEmpty() ? null : failureIndices.get(failureIndices.size() - 1); + return isFailureStoreEnabled() == false || failureIndices.indices.isEmpty() ? null : failureIndices.getWriteIndex(); } /** * Returns true if the index name provided belongs to a failure store index. - * This method builds a local Set with all the failure store index names and then checks if it contains the name. - * This will perform better if there are multiple indices of this data stream checked. */ public boolean isFailureStoreIndex(String indexName) { - if (failureStoreLookup == null) { - // There is a chance this will be calculated twice, but it's a relatively cheap action, - // so it's not worth synchronising - if (failureIndices == null || failureIndices.isEmpty()) { - failureStoreLookup = Set.of(); - } else { - failureStoreLookup = failureIndices.stream().map(Index::getName).collect(Collectors.toSet()); - } - } - return failureStoreLookup.contains(indexName); + return failureIndices.containsIndex(indexName); } public boolean rolloverOnWrite() { - return rolloverOnWrite; + return backingIndices.rolloverOnWrite; } /** @@ -275,8 +239,8 @@ public boolean rolloverOnWrite() { * an end time that is less than the provided timestamp. Otherwise null is returned. */ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { - for (int i = indices.size() - 1; i >= 0; i--) { - Index index = indices.get(i); + for (int i = backingIndices.indices.size() - 1; i >= 0; i--) { + Index index = backingIndices.indices.get(i); IndexMetadata im = metadata.index(index); // TODO: make index_mode, start and end time fields in IndexMetadata class. 
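The loop in selectTimeSeriesWriteIndex walks the backing indices from newest to oldest and returns the first one whose time range covers the timestamp. A condensed sketch of that contract (the start/end accessors are hypothetical; per the TODO above, those values are currently derived from index settings rather than IndexMetadata fields):

    for (int i = backingIndices.getIndices().size() - 1; i >= 0; i--) {   // newest first
        Index index = backingIndices.getIndices().get(i);
        IndexMetadata im = metadata.index(index);
        Instant start = timeSeriesStart(im);  // hypothetical accessor
        Instant end = timeSeriesEnd(im);      // hypothetical accessor
        // a backing index accepts timestamps in [start, end)
        if (timestamp.compareTo(start) >= 0 && timestamp.compareTo(end) < 0) {
            return index;
        }
    }
    return null; // no backing index covers the provided timestamp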
@@ -306,7 +270,7 @@ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { public void validate(Function imSupplier) { if (indexMode == IndexMode.TIME_SERIES) { // Get a sorted overview of each backing index with their start and end time range: - var startAndEndTimes = indices.stream().map(index -> { + var startAndEndTimes = backingIndices.indices.stream().map(index -> { IndexMetadata im = imSupplier.apply(index.getName()); if (im == null) { throw new IllegalStateException("index [" + index.getName() + "] is not found in the index metadata supplier"); @@ -407,7 +371,19 @@ public DataStreamLifecycle getLifecycle() { * Returns the latest auto sharding event that happened for this data stream */ public DataStreamAutoShardingEvent getAutoShardingEvent() { - return autoShardingEvent; + return backingIndices.autoShardingEvent; + } + + public DataStreamIndices getBackingIndices() { + return backingIndices; + } + + public DataStreamIndices getFailureIndices() { + return failureIndices; + } + + public DataStreamIndices getDataStreamIndices(boolean failureStore) { + return failureStore ? this.failureIndices : backingIndices; } /** @@ -446,15 +422,11 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time indexMode = null; } - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.add(writeIndex); - return copy().setIndices(backingIndices) - .setGeneration(generation) - .setReplicated(false) - .setIndexMode(indexMode) - .setAutoShardingEvent(autoShardingEvent) - .setRolloverOnWrite(false) - .build(); + return copy().setBackingIndices( + this.backingIndices.copy().setIndices(backingIndices).setAutoShardingEvent(autoShardingEvent).setRolloverOnWrite(false).build() + ).setGeneration(generation).setIndexMode(indexMode).build(); } /** @@ -475,56 +447,32 @@ public DataStream rolloverFailureStore(Index writeIndex, long generation) { * Like {@link #rolloverFailureStore(Index, long)}, but does no validation, use with care only. */ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) { - List failureIndices = new ArrayList<>(this.failureIndices); + List failureIndices = new ArrayList<>(this.failureIndices.indices); failureIndices.add(writeIndex); - return copy().setGeneration(generation).setReplicated(false).setFailureIndices(failureIndices).build(); + return copy().setGeneration(generation).setFailureIndices(this.failureIndices.copy().setIndices(failureIndices).build()).build(); } /** * Generates the next write index name and generation to be used for rolling over this data stream. * * @param clusterMetadata Cluster metadata + * @param dataStreamIndices The data stream indices that we're generating the next write index name and generation for * @return tuple of the next write index name and next generation. */ - public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata) { - ensureNotReplicated(); - return unsafeNextWriteIndexAndGeneration(clusterMetadata); - } - - /** - * Like {@link #nextWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. - */ - public Tuple unsafeNextWriteIndexAndGeneration(Metadata clusterMetadata) { - return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultBackingIndexName); - } - - /** - * Generates the next write index name and generation to be used for rolling over the failure store of this data stream.
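Both rollover paths now share unsafeNextWriteIndexAndGeneration (continued just below), which formats candidate names and probes cluster metadata until an unused one is found. A hedged illustration of the resulting names, assuming the upstream prefixes (.ds- for backing indices, .fs- for the failure store) and a yyyy.MM.dd date format, neither of which is shown in this hunk:

    String candidate = String.format(Locale.ROOT, "%s%s-%s-%06d", ".ds-", "logs-app", "2024.05.01", 43);
    // -> ".ds-logs-app-2024.05.01-000043"
    // The do/while below keeps incrementing the generation while
    // clusterMetadata.hasIndexAbstraction(candidate) still reports a clash.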
- * - * @param clusterMetadata Cluster metadata - * @return tuple of the next failure store write index name and next generation. - */ - public Tuple nextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { + public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata, DataStreamIndices dataStreamIndices) { ensureNotReplicated(); - return unsafeNextFailureStoreWriteIndexAndGeneration(clusterMetadata); + return unsafeNextWriteIndexAndGeneration(clusterMetadata, dataStreamIndices); } /** - * Like {@link #nextFailureStoreWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. + * Like {@link #nextWriteIndexAndGeneration(Metadata, DataStreamIndices)}, but does no validation, use with care only. */ - public Tuple unsafeNextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { - return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultFailureStoreName); - } - - private Tuple generateNextWriteIndexAndGeneration( - Metadata clusterMetadata, - TriFunction nameGenerator - ) { + public Tuple unsafeNextWriteIndexAndGeneration(Metadata clusterMetadata, DataStreamIndices dataStreamIndices) { String newWriteIndexName; long generation = this.generation; long currentTimeMillis = timeProvider.getAsLong(); do { - newWriteIndexName = nameGenerator.apply(getName(), ++generation, currentTimeMillis); + newWriteIndexName = dataStreamIndices.generateName(name, ++generation, currentTimeMillis); } while (clusterMetadata.hasIndexAbstraction(newWriteIndexName)); return Tuple.tuple(newWriteIndexName, generation); } @@ -544,14 +492,14 @@ private void ensureNotReplicated() { * @throws IllegalArgumentException if {@code index} is not a backing index or is the current write index of the data stream */ public DataStream removeBackingIndex(Index index) { - int backingIndexPosition = indices.indexOf(index); + int backingIndexPosition = backingIndices.indices.indexOf(index); if (backingIndexPosition == -1) { throw new IllegalArgumentException( String.format(Locale.ROOT, "index [%s] is not part of data stream [%s]", index.getName(), name) ); } - if (indices.size() == (backingIndexPosition + 1)) { throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -562,10 +510,12 @@ ); } - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.remove(index); - assert backingIndices.size() == indices.size() - 1; - return copy().setIndices(backingIndices).setGeneration(generation + 1).build(); + assert backingIndices.size() == this.backingIndices.indices.size() - 1; + return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -578,7 +528,7 @@ * data stream */ public DataStream removeFailureStoreIndex(Index index) { - int failureIndexPosition = failureIndices.indexOf(index); + int failureIndexPosition = failureIndices.indices.indexOf(index); if (failureIndexPosition == -1) { throw new IllegalArgumentException( @@ -588,7 +538,7 @@ // TODO: When failure stores are lazily created, this won't necessarily be required anymore.
We can remove the failure store write // index as long as we mark the data stream to lazily rollover the failure store with no conditions on its next write - if (failureIndices.size() == (failureIndexPosition + 1)) { + if (failureIndices.indices.size() == (failureIndexPosition + 1)) { throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -599,10 +549,12 @@ public DataStream removeFailureStoreIndex(Index index) { ); } - List updatedFailureIndices = new ArrayList<>(failureIndices); + List updatedFailureIndices = new ArrayList<>(failureIndices.indices); updatedFailureIndices.remove(index); - assert updatedFailureIndices.size() == failureIndices.size() - 1; - return copy().setGeneration(generation + 1).setFailureIndices(updatedFailureIndices).build(); + assert updatedFailureIndices.size() == failureIndices.indices.size() - 1; + return copy().setFailureIndices(failureIndices.copy().setIndices(updatedFailureIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -616,14 +568,14 @@ public DataStream removeFailureStoreIndex(Index index) { * existing index. */ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBackingIndex) { - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); int backingIndexPosition = backingIndices.indexOf(existingBackingIndex); if (backingIndexPosition == -1) { throw new IllegalArgumentException( String.format(Locale.ROOT, "index [%s] is not part of data stream [%s]", existingBackingIndex.getName(), name) ); } - if (indices.size() == (backingIndexPosition + 1)) { + if (this.backingIndices.indices.size() == (backingIndexPosition + 1)) { throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -634,7 +586,9 @@ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBacki ); } backingIndices.set(backingIndexPosition, newBackingIndex); - return copy().setIndices(backingIndices).setGeneration(generation + 1).build(); + return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -656,10 +610,12 @@ public DataStream addBackingIndex(Metadata clusterMetadata, Index index) { // ensure that no aliases reference index ensureNoAliasesOnIndex(clusterMetadata, index); - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.add(0, index); - assert backingIndices.size() == indices.size() + 1; - return copy().setIndices(backingIndices).setGeneration(generation + 1).build(); + assert backingIndices.size() == this.backingIndices.indices.size() + 1; + return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -680,10 +636,12 @@ public DataStream addFailureStoreIndex(Metadata clusterMetadata, Index index) { ensureNoAliasesOnIndex(clusterMetadata, index); - List updatedFailureIndices = new ArrayList<>(failureIndices); + List updatedFailureIndices = new ArrayList<>(failureIndices.indices); updatedFailureIndices.add(0, index); - assert updatedFailureIndices.size() == failureIndices.size() + 1; - return copy().setGeneration(generation + 1).setFailureIndices(updatedFailureIndices).build(); + assert updatedFailureIndices.size() == failureIndices.indices.size() + 1; + return copy().setFailureIndices(failureIndices.copy().setIndices(updatedFailureIndices).build()) + .setGeneration(generation + 1) + 
.build(); } /** @@ -742,7 +700,7 @@ public DataStream promoteDataStream() { @Nullable public DataStream snapshot(Collection indicesInSnapshot) { // do not include indices not available in the snapshot - List reconciledIndices = new ArrayList<>(this.indices); + List reconciledIndices = new ArrayList<>(this.backingIndices.indices); if (reconciledIndices.removeIf(x -> indicesInSnapshot.contains(x.getName()) == false) == false) { return this; } @@ -751,7 +709,9 @@ public DataStream snapshot(Collection indicesInSnapshot) { return null; } - return copy().setIndices(reconciledIndices).setMetadata(metadata == null ? null : new HashMap<>(metadata)).build(); + return copy().setBackingIndices(backingIndices.copy().setIndices(reconciledIndices).build()) + .setMetadata(metadata == null ? null : new HashMap<>(metadata)) + .build(); } /** @@ -792,7 +752,7 @@ public List getDownsamplingRoundsFor( Function indexMetadataSupplier, LongSupplier nowSupplier ) { - assert indices.contains(index) : "the provided index must be a backing index for this datastream"; + assert backingIndices.indices.contains(index) : "the provided index must be a backing index for this datastream"; if (lifecycle == null || lifecycle.getDownsamplingRounds() == null) { return List.of(); } @@ -831,7 +791,7 @@ public List getNonWriteIndicesOlderThan( LongSupplier nowSupplier ) { List olderIndices = new ArrayList<>(); - for (Index index : indices) { + for (Index index : backingIndices.indices) { if (isIndexOderThan(index, retentionPeriod.getMillis(), nowSupplier.getAsLong(), indicesPredicate, indexMetadataSupplier)) { olderIndices.add(index); } @@ -864,7 +824,7 @@ private boolean isIndexOderThan( * we return false. */ public boolean isIndexManagedByDataStreamLifecycle(Index index, Function indexMetadataSupplier) { - if (indices.contains(index) == false) { + if (backingIndices.indices.contains(index) == false) { return false; } IndexMetadata indexMetadata = indexMetadataSupplier.apply(index.getName()); @@ -936,13 +896,7 @@ public static String getDefaultBackingIndexName(String dataStreamName, long gene * @return backing index name */ public static String getDefaultBackingIndexName(String dataStreamName, long generation, long epochMillis) { - return String.format( - Locale.ROOT, - BACKING_INDEX_PREFIX + "%s-%s-%06d", - dataStreamName, - DATE_FORMATTER.formatMillis(epochMillis), - generation - ); + return getDefaultIndexName(BACKING_INDEX_PREFIX, dataStreamName, generation, epochMillis); } /** @@ -955,33 +909,65 @@ public static String getDefaultBackingIndexName(String dataStreamName, long gene * @return backing index name */ public static String getDefaultFailureStoreName(String dataStreamName, long generation, long epochMillis) { - return String.format( - Locale.ROOT, - FAILURE_STORE_PREFIX + "%s-%s-%06d", - dataStreamName, - DATE_FORMATTER.formatMillis(epochMillis), - generation - ); + return getDefaultIndexName(FAILURE_STORE_PREFIX, dataStreamName, generation, epochMillis); } - public DataStream(StreamInput in) throws IOException { - this( - readName(in), - readIndices(in), - in.readVLong(), - in.readGenericMap(), - in.readBoolean(), - in.readBoolean(), - in.readBoolean(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) ? in.readBoolean() : false, - in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) ? in.readOptionalEnum(IndexMode.class) : null, - in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? 
in.readOptionalWriteable(DataStreamLifecycle::new) : null, - in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? in.readBoolean() : false, - in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? readIndices(in) : List.of(), - in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED) ? in.readBoolean() : false, - in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION) - ? in.readOptionalWriteable(DataStreamAutoShardingEvent::new) - : null + /** + * Generates the name of the index that conforms to the default naming convention for indices + * on data streams given the specified prefix, data stream name, generation, and time. + * + * @param prefix the prefix that the index name should have + * @param dataStreamName name of the data stream + * @param generation generation of the data stream + * @param epochMillis creation time for the backing index + * @return backing index name + */ + private static String getDefaultIndexName(String prefix, String dataStreamName, long generation, long epochMillis) { + return String.format(Locale.ROOT, prefix + "%s-%s-%06d", dataStreamName, DATE_FORMATTER.formatMillis(epochMillis), generation); + } + + public static DataStream read(StreamInput in) throws IOException { + var name = readName(in); + var backingIndicesBuilder = DataStreamIndices.backingIndicesBuilder(readIndices(in)); + var generation = in.readVLong(); + var metadata = in.readGenericMap(); + var hidden = in.readBoolean(); + var replicated = in.readBoolean(); + var system = in.readBoolean(); + var allowCustomRouting = in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) ? in.readBoolean() : false; + var indexMode = in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) ? in.readOptionalEnum(IndexMode.class) : null; + var lifecycle = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) + ? in.readOptionalWriteable(DataStreamLifecycle::new) + : null; + var failureStoreEnabled = in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) + ? in.readBoolean() + : false; + var failureIndices = in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) + ? readIndices(in) + : List.of(); + var failureIndicesBuilder = DataStreamIndices.failureIndicesBuilder(failureIndices); + backingIndicesBuilder.setRolloverOnWrite(in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
in.readBoolean() : false); + if (in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { + backingIndicesBuilder.setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); + } + if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_FIELD_PARITY)) { + failureIndicesBuilder.setRolloverOnWrite(in.readBoolean()) + .setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); + } + return new DataStream( + name, + generation, + metadata, + hidden, + replicated, + system, + System::currentTimeMillis, + allowCustomRouting, + indexMode, + lifecycle, + failureStoreEnabled, + backingIndicesBuilder.build(), + failureIndicesBuilder.build() ); } @@ -996,14 +982,14 @@ static List readIndices(StreamInput in) throws IOException { } public static Diff readDiffFrom(StreamInput in) throws IOException { - return SimpleDiffable.readDiffFrom(DataStream::new, in); + return SimpleDiffable.readDiffFrom(DataStream::read, in); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(TIMESTAMP_FIELD_NAME); // TODO: clear this out in the future https://github.com/elastic/elasticsearch/issues/101991 - out.writeCollection(indices); + out.writeCollection(backingIndices.indices); out.writeVLong(generation); out.writeGenericMap(metadata); out.writeBoolean(hidden); @@ -1020,13 +1006,17 @@ public void writeTo(StreamOutput out) throws IOException { } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) { out.writeBoolean(failureStoreEnabled); - out.writeCollection(failureIndices); + out.writeCollection(failureIndices.indices); } - if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { - out.writeBoolean(rolloverOnWrite); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + out.writeBoolean(backingIndices.rolloverOnWrite); } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { - out.writeOptionalWriteable(autoShardingEvent); + out.writeOptionalWriteable(backingIndices.autoShardingEvent); + } + if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_FIELD_PARITY)) { + out.writeBoolean(failureIndices.rolloverOnWrite); + out.writeOptionalWriteable(failureIndices.autoShardingEvent); } } @@ -1045,30 +1035,41 @@ public void writeTo(StreamOutput out) throws IOException { public static final ParseField FAILURE_INDICES_FIELD = new ParseField("failure_indices"); public static final ParseField ROLLOVER_ON_WRITE_FIELD = new ParseField("rollover_on_write"); public static final ParseField AUTO_SHARDING_FIELD = new ParseField("auto_sharding"); + public static final ParseField FAILURE_ROLLOVER_ON_WRITE_FIELD = new ParseField("failure_rollover_on_write"); + public static final ParseField FAILURE_AUTO_SHARDING_FIELD = new ParseField("failure_auto_sharding"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("data_stream", args -> { // Fields behind a feature flag need to be parsed last otherwise the parser will fail when the feature flag is disabled. // Until the feature flag is removed we keep them separately to be mindful of this. boolean failureStoreEnabled = DataStream.isFailureStoreFeatureFlagEnabled() && args[12] != null && (boolean) args[12]; - List failureStoreIndices = DataStream.isFailureStoreFeatureFlagEnabled() && args[13] != null - ? 
(List) args[13] - : List.of(); + DataStreamIndices failureIndices = DataStream.isFailureStoreFeatureFlagEnabled() + ? new DataStreamIndices( + FAILURE_STORE_PREFIX, + args[13] != null ? (List) args[13] : List.of(), + args[14] != null && (boolean) args[14], + (DataStreamAutoShardingEvent) args[15] + ) + : new DataStreamIndices(FAILURE_STORE_PREFIX, List.of(), false, null); return new DataStream( (String) args[0], - (List) args[1], (Long) args[2], (Map) args[3], args[4] != null && (boolean) args[4], args[5] != null && (boolean) args[5], args[6] != null && (boolean) args[6], + System::currentTimeMillis, args[7] != null && (boolean) args[7], args[8] != null ? IndexMode.fromString((String) args[8]) : null, (DataStreamLifecycle) args[9], failureStoreEnabled, - failureStoreIndices, - args[10] != null && (boolean) args[10], - (DataStreamAutoShardingEvent) args[11] + new DataStreamIndices( + BACKING_INDEX_PREFIX, + (List) args[1], + args[10] != null && (boolean) args[10], + (DataStreamAutoShardingEvent) args[11] + ), + failureIndices ); }); @@ -1105,6 +1106,12 @@ public void writeTo(StreamOutput out) throws IOException { (p, c) -> Index.fromXContent(p), FAILURE_INDICES_FIELD ); + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), FAILURE_ROLLOVER_ON_WRITE_FIELD); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> DataStreamAutoShardingEvent.fromXContent(p), + FAILURE_AUTO_SHARDING_FIELD + ); } } @@ -1132,11 +1139,8 @@ public XContentBuilder toXContent( .startObject() .field(NAME_FIELD.getPreferredName(), TIMESTAMP_FIELD_NAME) .endObject(); - builder.xContentList(INDICES_FIELD.getPreferredName(), indices); + builder.xContentList(INDICES_FIELD.getPreferredName(), backingIndices.indices); builder.field(GENERATION_FIELD.getPreferredName(), generation); - if (DataStream.isFailureStoreFeatureFlagEnabled() && failureIndices.isEmpty() == false) { - builder.xContentList(FAILURE_INDICES_FIELD.getPreferredName(), failureIndices); - } if (metadata != null) { builder.field(METADATA_FIELD.getPreferredName(), metadata); } @@ -1146,6 +1150,15 @@ public XContentBuilder toXContent( builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), allowCustomRouting); if (DataStream.isFailureStoreFeatureFlagEnabled()) { builder.field(FAILURE_STORE_FIELD.getPreferredName(), failureStoreEnabled); + if (failureIndices.indices.isEmpty() == false) { + builder.xContentList(FAILURE_INDICES_FIELD.getPreferredName(), failureIndices.indices); + } + builder.field(FAILURE_ROLLOVER_ON_WRITE_FIELD.getPreferredName(), failureIndices.rolloverOnWrite); + if (failureIndices.autoShardingEvent != null) { + builder.startObject(FAILURE_AUTO_SHARDING_FIELD.getPreferredName()); + failureIndices.autoShardingEvent.toXContent(builder, params); + builder.endObject(); + } } if (indexMode != null) { builder.field(INDEX_MODE.getPreferredName(), indexMode); @@ -1154,10 +1167,10 @@ public XContentBuilder toXContent( builder.field(LIFECYCLE.getPreferredName()); lifecycle.toXContent(builder, params, rolloverConfiguration, isSystem() ? 
null : globalRetention); } - builder.field(ROLLOVER_ON_WRITE_FIELD.getPreferredName(), rolloverOnWrite); - if (autoShardingEvent != null) { + builder.field(ROLLOVER_ON_WRITE_FIELD.getPreferredName(), backingIndices.rolloverOnWrite); + if (backingIndices.autoShardingEvent != null) { builder.startObject(AUTO_SHARDING_FIELD.getPreferredName()); - autoShardingEvent.toXContent(builder, params); + backingIndices.autoShardingEvent.toXContent(builder, params); builder.endObject(); } builder.endObject(); @@ -1170,7 +1183,6 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DataStream that = (DataStream) o; return name.equals(that.name) - && indices.equals(that.indices) && generation == that.generation && Objects.equals(metadata, that.metadata) && hidden == that.hidden @@ -1180,16 +1192,14 @@ && indexMode == that.indexMode && Objects.equals(lifecycle, that.lifecycle) && failureStoreEnabled == that.failureStoreEnabled - && failureIndices.equals(that.failureIndices) - && rolloverOnWrite == that.rolloverOnWrite - && Objects.equals(autoShardingEvent, that.autoShardingEvent); + && Objects.equals(backingIndices, that.backingIndices) + && Objects.equals(failureIndices, that.failureIndices); } @Override public int hashCode() { return Objects.hash( name, - indices, generation, metadata, hidden, @@ -1199,9 +1209,8 @@ indexMode, lifecycle, failureStoreEnabled, - failureIndices, - rolloverOnWrite, - autoShardingEvent + backingIndices, + failureIndices ); } @@ -1345,14 +1354,143 @@ public static Builder builder(String name, List indices) { return new Builder(name, indices); } + public static Builder builder(String name, DataStreamIndices backingIndices) { + return new Builder(name, backingIndices); + } + public Builder copy() { return new Builder(this); } + public static class DataStreamIndices { + private final String namePrefix; + private final List indices; + private final boolean rolloverOnWrite; + @Nullable + private final DataStreamAutoShardingEvent autoShardingEvent; + private Set lookup; + + protected DataStreamIndices( + String namePrefix, + List indices, + boolean rolloverOnWrite, + DataStreamAutoShardingEvent autoShardingEvent + ) { + this.namePrefix = namePrefix; + // The list of indices is expected to be an immutable list. We don't create an immutable copy here, as that might + // impact performance for some usages.
+ this.indices = indices; + this.rolloverOnWrite = rolloverOnWrite; + this.autoShardingEvent = autoShardingEvent; + + assert getLookup().size() == indices.size() : "found duplicate index entries in " + indices; + } + + private Set getLookup() { + if (lookup == null) { + lookup = indices.stream().map(Index::getName).collect(Collectors.toSet()); + } + return lookup; + } + + public Index getWriteIndex() { + return indices.get(indices.size() - 1); + } + + public boolean containsIndex(String index) { + return getLookup().contains(index); + } + + private String generateName(String dataStreamName, long generation, long epochMillis) { + return getDefaultIndexName(namePrefix, dataStreamName, generation, epochMillis); + } + + public static Builder backingIndicesBuilder(List indices) { + return new Builder(BACKING_INDEX_PREFIX, indices); + } + + public static Builder failureIndicesBuilder(List indices) { + return new Builder(FAILURE_STORE_PREFIX, indices); + } + + public Builder copy() { + return new Builder(this); + } + + public List getIndices() { + return indices; + } + + public boolean isRolloverOnWrite() { + return rolloverOnWrite; + } + + public DataStreamAutoShardingEvent getAutoShardingEvent() { + return autoShardingEvent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataStreamIndices that = (DataStreamIndices) o; + return rolloverOnWrite == that.rolloverOnWrite + && Objects.equals(namePrefix, that.namePrefix) + && Objects.equals(indices, that.indices) + && Objects.equals(autoShardingEvent, that.autoShardingEvent); + } + + @Override + public int hashCode() { + return Objects.hash(namePrefix, indices, rolloverOnWrite, autoShardingEvent); + } + + public static class Builder { + private final String namePrefix; + private List indices; + private boolean rolloverOnWrite = false; + @Nullable + private DataStreamAutoShardingEvent autoShardingEvent = null; + + private Builder(String namePrefix, List indices) { + this.namePrefix = namePrefix; + this.indices = indices; + } + + private Builder(DataStreamIndices dataStreamIndices) { + this.namePrefix = dataStreamIndices.namePrefix; + this.indices = dataStreamIndices.indices; + this.rolloverOnWrite = dataStreamIndices.rolloverOnWrite; + this.autoShardingEvent = dataStreamIndices.autoShardingEvent; + } + + /** + * Set the list of indices. We always create an immutable copy as that's what the constructor expects. 
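Taken together, the nested builders give call sites one uniform way to rebuild either index group; a hedged usage sketch (the index lists are illustrative):

    DataStream.DataStreamIndices backing = DataStream.DataStreamIndices.backingIndicesBuilder(backingList)
        .setRolloverOnWrite(true)   // request a lazy rollover on the next write
        .build();
    DataStream.DataStreamIndices failures = DataStream.DataStreamIndices.failureIndicesBuilder(List.of())
        .build();                   // the failure store may start out empty
    DataStream updated = dataStream.copy()
        .setBackingIndices(backing) // must contain at least one index
        .setFailureIndices(failures)
        .build();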
+ */ + public Builder setIndices(List indices) { + this.indices = List.copyOf(indices); + return this; + } + + public Builder setRolloverOnWrite(boolean rolloverOnWrite) { + this.rolloverOnWrite = rolloverOnWrite; + return this; + } + + public Builder setAutoShardingEvent(DataStreamAutoShardingEvent autoShardingEvent) { + this.autoShardingEvent = autoShardingEvent; + return this; + } + + public DataStreamIndices build() { + return new DataStreamIndices(namePrefix, indices, rolloverOnWrite, autoShardingEvent); + } + } + } + public static class Builder { private LongSupplier timeProvider = System::currentTimeMillis; private String name; - private List indices; private long generation = 1; @Nullable private Map metadata = null; @@ -1364,22 +1502,23 @@ public static class Builder { private IndexMode indexMode = null; @Nullable private DataStreamLifecycle lifecycle = null; - private boolean rolloverOnWrite = false; private boolean failureStoreEnabled = false; - private List failureIndices = List.of(); - @Nullable - private DataStreamAutoShardingEvent autoShardingEvent = null; + private DataStreamIndices backingIndices; + private DataStreamIndices failureIndices = DataStreamIndices.failureIndicesBuilder(List.of()).build(); - public Builder(String name, List indices) { + private Builder(String name, List indices) { + this(name, DataStreamIndices.backingIndicesBuilder(indices).build()); + } + + private Builder(String name, DataStreamIndices backingIndices) { this.name = name; - assert indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; - this.indices = indices; + assert backingIndices.indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; + this.backingIndices = backingIndices; } - public Builder(DataStream dataStream) { + private Builder(DataStream dataStream) { timeProvider = dataStream.timeProvider; name = dataStream.name; - indices = dataStream.indices; generation = dataStream.generation; metadata = dataStream.metadata; hidden = dataStream.hidden; @@ -1388,10 +1527,9 @@ public Builder(DataStream dataStream) { allowCustomRouting = dataStream.allowCustomRouting; indexMode = dataStream.indexMode; lifecycle = dataStream.lifecycle; - rolloverOnWrite = dataStream.rolloverOnWrite; failureStoreEnabled = dataStream.failureStoreEnabled; + backingIndices = dataStream.backingIndices; failureIndices = dataStream.failureIndices; - autoShardingEvent = dataStream.autoShardingEvent; } public Builder setTimeProvider(LongSupplier timeProvider) { @@ -1404,12 +1542,6 @@ public Builder setName(String name) { return this; } - public Builder setIndices(List indices) { - assert indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; - this.indices = indices; - return this; - } - public Builder setGeneration(long generation) { this.generation = generation; return this; @@ -1450,30 +1582,34 @@ public Builder setLifecycle(DataStreamLifecycle lifecycle) { return this; } - public Builder setRolloverOnWrite(boolean rolloverOnWrite) { - this.rolloverOnWrite = rolloverOnWrite; + public Builder setFailureStoreEnabled(boolean failureStoreEnabled) { + this.failureStoreEnabled = failureStoreEnabled; return this; } - public Builder setFailureStoreEnabled(boolean failureStoreEnabled) { - this.failureStoreEnabled = failureStoreEnabled; + public Builder setBackingIndices(DataStreamIndices backingIndices) { + assert backingIndices.indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; + this.backingIndices = 
backingIndices; return this; } - public Builder setFailureIndices(List failureIndices) { + public Builder setFailureIndices(DataStreamIndices failureIndices) { this.failureIndices = failureIndices; return this; } - public Builder setAutoShardingEvent(DataStreamAutoShardingEvent autoShardingEvent) { - this.autoShardingEvent = autoShardingEvent; + public Builder setDataStreamIndices(boolean targetFailureStore, DataStreamIndices indices) { + if (targetFailureStore) { + setFailureIndices(indices); + } else { + setBackingIndices(indices); + } return this; } public DataStream build() { return new DataStream( name, - indices, generation, metadata, hidden, @@ -1484,9 +1620,8 @@ public DataStream build() { indexMode, lifecycle, failureStoreEnabled, - failureIndices, - rolloverOnWrite, - autoShardingEvent + backingIndices, + failureIndices ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java index fef9ebe993a4d..c65f83eca0aa2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java @@ -89,7 +89,7 @@ public DataStreamMetadata( public DataStreamMetadata(StreamInput in) throws IOException { this( - in.readImmutableOpenMap(StreamInput::readString, DataStream::new), + in.readImmutableOpenMap(StreamInput::readString, DataStream::read), in.readImmutableOpenMap(StreamInput::readString, DataStreamAlias::new) ); } @@ -265,7 +265,7 @@ public String toString() { static class DataStreamMetadataDiff implements NamedDiff { private static final DiffableUtils.DiffableValueReader DS_DIFF_READER = new DiffableUtils.DiffableValueReader<>( - DataStream::new, + DataStream::read, DataStream::readDiffFrom ); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index 1ce950cf71f58..403b4b85e664b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -204,7 +204,7 @@ public static DesiredNode readFrom(StreamInput in) throws IOException { final var memory = ByteSizeValue.readFrom(in); final var storage = ByteSizeValue.readFrom(in); final String version; - if (in.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { version = in.readOptionalString(); } else { version = Version.readVersion(in).toString(); @@ -237,7 +237,7 @@ public void writeTo(StreamOutput out) throws IOException { } memory.writeTo(out); storage.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(version); } else { Version parsedVersion = parseLegacyVersion(version); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 678655252248f..681dcb3e314e3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -189,13 +189,17 @@ public void writeTo(StreamOutput out) throws IOException { } } + /** + * This is a 
safety limit that should only be exceeded in very rare and special cases. The assumption is that + * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires a restart of nodes + * if a cluster should allow creating more than 1024 shards per index. NOTE: this does not limit the number of shards + * per cluster. It also prevents accidentally creating something like a new index with millions of shards, which would essentially + * kill the entire cluster with OOM on the spot. + */ + public static final String PER_INDEX_MAX_NUMBER_OF_SHARDS = "1024"; + static Setting buildNumberOfShardsSetting() { - /* This is a safety limit that should only be exceeded in very rare and special cases. The assumption is that - * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires restart of nodes - * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards - * per cluster. this also prevents creating stuff like a new index with millions of shards by accident which essentially - * kills the entire cluster with OOM on the spot.*/ - final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", "1024")); + final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", PER_INDEX_MAX_NUMBER_OF_SHARDS)); if (maxNumShards < 1) { throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0"); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index effc89d8e535a..8bc8f9d96bf24 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -425,7 +425,7 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions(), dataStream)) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices()) { + for (Index index : dataStream.getFailureIndices().getIndices()) { if (shouldTrackConcreteIndex(context, context.getOptions(), index)) { concreteIndicesResult.add(index); } @@ -470,7 +470,7 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract count += dataStream.getIndices().size(); } if (shouldIncludeFailureIndices(context.getOptions(), dataStream)) { - count += dataStream.getFailureIndices().size(); + count += dataStream.getFailureIndices().getIndices().size(); } return count > 1; } @@ -1431,7 +1431,7 @@ && shouldIncludeFailureIndices(context.getOptions(), (DataStream) indexAbstracti DataStream dataStream = (DataStream) indexAbstraction; indicesStateStream = Stream.concat( indicesStateStream, - dataStream.getFailureIndices().stream().map(context.state.metadata()::index) + dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) ); } if (excludeState != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index fec209960597b..e25c12d0c2ad7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -2600,7 +2600,10 @@ private static boolean assertContainsIndexIfDataStream(DataStream parent, IndexM || parent.getIndices().stream().anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName())) || (DataStream.isFailureStoreFeatureFlagEnabled() && parent.isFailureStoreEnabled() - && parent.getFailureIndices().stream().anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName()))) + && parent.getFailureIndices() + .getIndices() + .stream() + .anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName()))) : "Expected data stream [" + parent.getName() + "] to contain index " + indexMetadata.getIndex(); return true; } @@ -2623,7 +2626,7 @@ private static void collectDataStreams( indexToDataStreamLookup.put(i.getName(), dataStream); } if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.isFailureStoreEnabled()) { - for (Index i : dataStream.getFailureIndices()) { + for (Index i : dataStream.getFailureIndices().getIndices()) { indexToDataStreamLookup.put(i.getName(), dataStream); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 2d1d38ac926d6..1062f741cf0bd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -111,8 +111,8 @@ public ClusterState execute(ClusterState currentState) throws Exception { ); DataStream createdDataStream = clusterState.metadata().dataStreams().get(request.name); firstBackingIndexRef.set(createdDataStream.getIndices().get(0).getName()); - if (createdDataStream.getFailureIndices().isEmpty() == false) { - firstFailureStoreRef.set(createdDataStream.getFailureIndices().get(0).getName()); + if (createdDataStream.getFailureIndices().getIndices().isEmpty() == false) { + firstFailureStoreRef.set(createdDataStream.getFailureIndices().getIndices().get(0).getName()); } return clusterState; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index a018f3d93a9bc..7363e71d65c72 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -77,7 +77,12 @@ public Tuple executeTask( ClusterState clusterState ) { return new Tuple<>( - setRolloverOnWrite(clusterState, setRolloverOnWriteTask.getDataStreamName(), setRolloverOnWriteTask.rolloverOnWrite()), + setRolloverOnWrite( + clusterState, + setRolloverOnWriteTask.getDataStreamName(), + setRolloverOnWriteTask.rolloverOnWrite(), + setRolloverOnWriteTask.targetFailureStore() + ), setRolloverOnWriteTask ); } @@ -152,13 +157,14 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String public void setRolloverOnWrite( String dataStreamName, boolean rolloverOnWrite, + boolean targetFailureStore, TimeValue ackTimeout, TimeValue masterTimeout, ActionListener listener ) { setRolloverOnWriteTaskQueue.submitTask( "set-rollover-on-write", - new SetRolloverOnWriteTask(dataStreamName, rolloverOnWrite, ackTimeout, listener), + new SetRolloverOnWriteTask(dataStreamName, rolloverOnWrite, targetFailureStore, 
ackTimeout, listener), masterTimeout ); } @@ -230,16 +236,25 @@ ClusterState updateDataLifecycle(ClusterState currentState, List dataStr * @param currentState the initial cluster state * @param dataStreamName the name of the data stream to be updated * @param rolloverOnWrite the value of the flag + * @param targetFailureStore whether this rollover targets the failure store or the backing indices * @return the updated cluster state */ - public static ClusterState setRolloverOnWrite(ClusterState currentState, String dataStreamName, boolean rolloverOnWrite) { + public static ClusterState setRolloverOnWrite( + ClusterState currentState, + String dataStreamName, + boolean rolloverOnWrite, + boolean targetFailureStore + ) { Metadata metadata = currentState.metadata(); var dataStream = validateDataStream(metadata, dataStreamName); - if (dataStream.rolloverOnWrite() == rolloverOnWrite) { + var indices = dataStream.getDataStreamIndices(targetFailureStore); + if (indices.isRolloverOnWrite() == rolloverOnWrite) { return currentState; } Metadata.Builder builder = Metadata.builder(metadata); - builder.put(dataStream.copy().setRolloverOnWrite(rolloverOnWrite).build()); + builder.put( + dataStream.copy().setDataStreamIndices(targetFailureStore, indices.copy().setRolloverOnWrite(rolloverOnWrite).build()).build() + ); return ClusterState.builder(currentState).metadata(builder.build()).build(); } @@ -286,7 +301,7 @@ private static void removeBackingIndex( ) { boolean indexNotRemoved = true; DataStream dataStream = validateDataStream(metadata, dataStreamName); - List targetIndices = failureStore ? dataStream.getFailureIndices() : dataStream.getIndices(); + List targetIndices = failureStore ? dataStream.getFailureIndices().getIndices() : dataStream.getIndices(); for (Index backingIndex : targetIndices) { if (backingIndex.getName().equals(indexName)) { if (failureStore) { @@ -365,16 +380,19 @@ static class SetRolloverOnWriteTask extends AckedBatchedClusterStateUpdateTask { private final String dataStreamName; private final boolean rolloverOnWrite; + private final boolean targetFailureStore; SetRolloverOnWriteTask( String dataStreamName, boolean rolloverOnWrite, + boolean targetFailureStore, TimeValue ackTimeout, ActionListener listener ) { super(ackTimeout, listener); this.dataStreamName = dataStreamName; this.rolloverOnWrite = rolloverOnWrite; + this.targetFailureStore = targetFailureStore; } public String getDataStreamName() { @@ -384,5 +402,9 @@ public String getDataStreamName() { public boolean rolloverOnWrite() { return rolloverOnWrite; } + + public boolean targetFailureStore() { + return targetFailureStore; + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 631845dc33288..f5bb97af7625f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -1864,7 +1864,7 @@ public static class PutRequest { CompressedXContent mappings = null; List aliases = new ArrayList<>(); - TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT; public PutRequest(String cause, String name) { this.cause = cause; @@ -1914,7 +1914,7 @@ public PutRequest version(Integer version) { public static class RemoveRequest { final String 
name; - TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT; public RemoveRequest(String name) { this.name = name; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java index ebdf6e4b3d8ee..3b1257a510747 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; @@ -223,14 +224,14 @@ public static class DefaultHotAllocationSettingProvider implements IndexSettingP @Override public Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { - Set settings = allSettings.keySet(); + Set settings = indexTemplateAndCreateRequestSettings.keySet(); if (settings.contains(TIER_PREFERENCE)) { // just a marker -- this null value will be removed or overridden by the template/request settings return NULL_TIER_PREFERENCE_SETTINGS; diff --git a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java index 6da0845a7c7ba..65bfa804cec2f 100644 --- a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.gateway.GatewayService; import java.io.IOException; import java.nio.file.Files; @@ -58,7 +59,8 @@ protected void doStop() { @Override public final void clusterChanged(ClusterChangedEvent event) { ClusterState clusterState = event.state(); - if (clusterState.nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster() + && clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { synchronized (this) { if (watching() || active == false) { refreshExistingFileStateIfNeeded(clusterState); diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 4fb02fdaac7b4..a385950e10922 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -113,7 +113,7 @@ public enum Property { DeprecatedWarning, /** - * Node scope + * Cluster-level or configuration file-level setting. Not an index setting. 
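For reference, a node-scope setting is declared roughly as in this sketch (the setting key here is hypothetical, not from this PR):

```java
// Hypothetical node-scope setting: applies at cluster/config level, not per index.
public static final Setting<Boolean> EXAMPLE_FLAG = Setting.boolSetting(
    "example.flag",             // made-up key for illustration
    false,                      // default value
    Setting.Property.NodeScope
);
```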
*/ NodeScope, diff --git a/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java b/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java new file mode 100644 index 0000000000000..39dbb83bdf5a4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import java.util.stream.IntStream; + +/** + * A CharSequence that provides a subsequence of another CharSequence without allocating a new backing array (as String does) + */ +class CharSubSequence implements CharSequence { + private final CharSequence wrapped; + private final int startOffset; // inclusive + private final int endOffset; // exclusive + + CharSubSequence(CharSequence wrapped, int startOffset, int endOffset) { + if (startOffset < 0) throw new IllegalArgumentException(); + if (endOffset > wrapped.length()) throw new IllegalArgumentException(); + if (endOffset < startOffset) throw new IllegalArgumentException(); + + this.wrapped = wrapped; + this.startOffset = startOffset; + this.endOffset = endOffset; + } + + @Override + public int length() { + return endOffset - startOffset; + } + + @Override + public char charAt(int index) { + int adjustedIndex = index + startOffset; + if (adjustedIndex < startOffset || adjustedIndex >= endOffset) throw new IndexOutOfBoundsException(index); + return wrapped.charAt(adjustedIndex); + } + + @Override + public boolean isEmpty() { + return startOffset == endOffset; + } + + @Override + public CharSequence subSequence(int start, int end) { + int adjustedStart = start + startOffset; + int adjustedEnd = end + startOffset; + if (adjustedStart < startOffset) throw new IndexOutOfBoundsException(start); + if (adjustedEnd > endOffset) throw new IndexOutOfBoundsException(end); + if (adjustedStart > adjustedEnd) throw new IndexOutOfBoundsException(); + + return wrapped.subSequence(adjustedStart, adjustedEnd); + } + + @Override + public IntStream chars() { + return wrapped.chars().skip(startOffset).limit(endOffset - startOffset); + } + + @Override + public String toString() { + return wrapped.subSequence(startOffset, endOffset).toString(); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 7dae11fb8d720..1133eac3f8f7b 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -9,7 +9,10 @@ package org.elasticsearch.common.time; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Booleans; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.logging.internal.spi.LoggerFactory; import java.time.Instant; import java.time.LocalDate; @@ -30,6 +33,7 @@ import java.time.temporal.TemporalQuery; import java.time.temporal.WeekFields; import java.util.Locale; +import java.util.Set; import java.util.stream.Stream; import static java.time.temporal.ChronoField.DAY_OF_MONTH; @@ -43,6 +47,24 @@ public class DateFormatters { + /** + * The 
ISO8601 parser is as close as possible to the java.time based parsers, but there are some strings + * that are no longer accepted (multiple fractional seconds, or multiple timezones) by the ISO parser. + * If a string cannot be parsed by the ISO parser, it then tries the java.time one. + * If there's lots of these strings, trying the ISO parser, then the java.time parser, might cause a performance drop. + * So provide a JVM option so that users can just use the java.time parsers, if they really need to. + */ + @UpdateForV9 // evaluate if we need to deprecate/remove this + private static final boolean JAVA_TIME_PARSERS_ONLY = Booleans.parseBoolean(System.getProperty("es.datetime.java_time_parsers"), false); + + static { + // when this is used directly in tests ES logging may not have been initialized yet + LoggerFactory logger; + if (JAVA_TIME_PARSERS_ONLY && (logger = LoggerFactory.provider()) != null) { + logger.getLogger(DateFormatters.class).info("Using java.time datetime parsers only"); + } + } + private static DateFormatter newDateFormatter(String format, DateTimeFormatter formatter) { return new JavaDateFormatter(format, new JavaTimeDateTimePrinter(formatter), new JavaTimeDateTimeParser(formatter)); } @@ -168,11 +190,18 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional. */ - private static final DateFormatter STRICT_DATE_OPTIONAL_TIME = newDateFormatter( - "strict_date_optional_time", - STRICT_DATE_OPTIONAL_TIME_PRINTER, - STRICT_DATE_OPTIONAL_TIME_FORMATTER - ); + private static final DateFormatter STRICT_DATE_OPTIONAL_TIME; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser(STRICT_DATE_OPTIONAL_TIME_FORMATTER); + + STRICT_DATE_OPTIONAL_TIME = new JavaDateFormatter( + "strict_date_optional_time", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { new Iso8601DateTimeParser(Set.of(), false).withLocale(Locale.ROOT), javaTimeParser } + ); + } private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS = new DateTimeFormatterBuilder().append( STRICT_YEAR_MONTH_DAY_FORMATTER @@ -224,51 +253,69 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution. */ - private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = newDateFormatter( - "strict_date_optional_time_nanos", - STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS, - STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS - ); + private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser(STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS); + + STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter( + "strict_date_optional_time_nanos", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { + new Iso8601DateTimeParser(Set.of(HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), true).withLocale(Locale.ROOT), + javaTimeParser } + ); + } /** * Returns a ISO 8601 compatible date time formatter and parser. 
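The fallback chain described above is visible through the public `DateFormatter` API; a rough sketch, using one of the three format names this PR touches:

```java
// The named formats now try the hand-written ISO-8601 parser first, and fall
// back to the java.time-based parser only for strings the fast path rejects.
DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time");
TemporalAccessor parsed = formatter.parse("2018-10-10T12:30:00Z"); // fast path
```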
* This is not fully compatible to the existing spec, which would require far more edge cases, but merely compatible with the * existing legacy joda time ISO date formatter */ - private static final DateFormatter ISO_8601 = newDateFormatter( - "iso8601", - STRICT_DATE_OPTIONAL_TIME_PRINTER, - new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .optionalStart() - .appendLiteral('T') - .optionalStart() - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendFraction(NANO_OF_SECOND, 1, 9, true) - .optionalEnd() - .optionalStart() - .appendLiteral(",") - .appendFraction(NANO_OF_SECOND, 1, 9, false) - .optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalStart() - .appendZoneOrOffsetId() - .optionalEnd() - .optionalStart() - .append(TIME_ZONE_FORMATTER_NO_COLON) - .optionalEnd() - .optionalEnd() - .toFormatter(Locale.ROOT) - .withResolverStyle(ResolverStyle.STRICT) - ); + private static final DateFormatter ISO_8601; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser( + new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .optionalStart() + .appendLiteral(",") + .appendFraction(NANO_OF_SECOND, 1, 9, false) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_NO_COLON) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT) + ); + + ISO_8601 = new JavaDateFormatter( + "iso8601", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { new Iso8601DateTimeParser(Set.of(), false).withLocale(Locale.ROOT), javaTimeParser } + ); + } ///////////////////////////////////////// // diff --git a/server/src/main/java/org/elasticsearch/common/time/DateTime.java b/server/src/main/java/org/elasticsearch/common/time/DateTime.java new file mode 100644 index 0000000000000..101389b43d9fc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateTime.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.time; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalQueries; +import java.time.temporal.TemporalQuery; +import java.time.temporal.UnsupportedTemporalTypeException; + +/** + * Provides information on a parsed datetime + */ +record DateTime( + int years, + Integer months, + Integer days, + Integer hours, + Integer minutes, + Integer seconds, + Integer nanos, + ZoneId zoneId, + ZoneOffset offset +) implements TemporalAccessor { + + @Override + @SuppressWarnings("unchecked") + public R query(TemporalQuery query) { + // shortcut a few queries used by DateFormatters.from + if (query == TemporalQueries.zoneId()) { + return (R) zoneId; + } + if (query == TemporalQueries.offset()) { + return (R) offset; + } + if (query == DateFormatters.LOCAL_DATE_QUERY || query == TemporalQueries.localDate()) { + if (months != null && days != null) { + return (R) LocalDate.of(years, months, days); + } + return null; + } + if (query == TemporalQueries.localTime()) { + if (hours != null && minutes != null && seconds != null) { + return (R) LocalTime.of(hours, minutes, seconds, nanos != null ? nanos : 0); + } + return null; + } + return TemporalAccessor.super.query(query); + } + + @Override + public boolean isSupported(TemporalField field) { + if (field instanceof ChronoField f) { + return switch (f) { + case YEAR -> true; + case MONTH_OF_YEAR -> months != null; + case DAY_OF_MONTH -> days != null; + case HOUR_OF_DAY -> hours != null; + case MINUTE_OF_HOUR -> minutes != null; + case SECOND_OF_MINUTE -> seconds != null; + case INSTANT_SECONDS -> months != null && days != null && hours != null && minutes != null && seconds != null; + // if the time components are there, we just default nanos to 0 if it's not present + case SECOND_OF_DAY, NANO_OF_SECOND, NANO_OF_DAY -> hours != null && minutes != null && seconds != null; + case OFFSET_SECONDS -> offset != null; + default -> false; + }; + } + + return field.isSupportedBy(this); + } + + @Override + public long getLong(TemporalField field) { + if (field instanceof ChronoField f) { + switch (f) { + case YEAR -> { + return years; + } + case MONTH_OF_YEAR -> { + return extractValue(f, months); + } + case DAY_OF_MONTH -> { + return extractValue(f, days); + } + case HOUR_OF_DAY -> { + return extractValue(f, hours); + } + case MINUTE_OF_HOUR -> { + return extractValue(f, minutes); + } + case SECOND_OF_MINUTE -> { + return extractValue(f, seconds); + } + case INSTANT_SECONDS -> { + if (isSupported(ChronoField.INSTANT_SECONDS) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalDateTime.of(years, months, days, hours, minutes, seconds) + .toEpochSecond(offset != null ? offset : ZoneOffset.UTC); + } + case SECOND_OF_DAY -> { + if (isSupported(ChronoField.SECOND_OF_DAY) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalTime.of(hours, minutes, seconds).toSecondOfDay(); + } + case NANO_OF_SECOND -> { + if (isSupported(ChronoField.NANO_OF_SECOND) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return nanos != null ? 
nanos.longValue() : 0L; + } + case NANO_OF_DAY -> { + if (isSupported(ChronoField.NANO_OF_DAY) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalTime.of(hours, minutes, seconds, nanos != null ? nanos : 0).toNanoOfDay(); + } + case OFFSET_SECONDS -> { + if (offset == null) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return offset.getTotalSeconds(); + } + default -> throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + } + + return field.getFrom(this); + } + + private static long extractValue(ChronoField field, Number value) { + if (value == null) { + throw new UnsupportedTemporalTypeException("No " + field + " value available"); + } + return value.longValue(); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java new file mode 100644 index 0000000000000..2a526a36408ce --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import java.time.ZoneId; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +class Iso8601DateTimeParser implements DateTimeParser { + + private final Iso8601Parser parser; + private final ZoneId timezone; + // the locale doesn't actually matter, as we're parsing in a standardised format + // and we already account for . 
or , in decimals + private final Locale locale; + + Iso8601DateTimeParser(Set mandatoryFields, boolean optionalTime) { + parser = new Iso8601Parser(mandatoryFields, optionalTime, Map.of()); + timezone = null; + locale = null; + } + + private Iso8601DateTimeParser(Iso8601Parser parser, ZoneId timezone, Locale locale) { + this.parser = parser; + this.timezone = timezone; + this.locale = locale; + } + + @Override + public ZoneId getZone() { + return timezone; + } + + @Override + public Locale getLocale() { + return locale; + } + + @Override + public DateTimeParser withZone(ZoneId zone) { + return new Iso8601DateTimeParser(parser, zone, locale); + } + + @Override + public DateTimeParser withLocale(Locale locale) { + return new Iso8601DateTimeParser(parser, timezone, locale); + } + + Iso8601DateTimeParser withDefaults(Map defaults) { + return new Iso8601DateTimeParser(new Iso8601Parser(parser.mandatoryFields(), parser.optionalTime(), defaults), timezone, locale); + } + + @Override + public TemporalAccessor parse(CharSequence str) { + var result = parser.tryParse(str, timezone); + var temporal = result.result(); + if (temporal == null) { + throw new DateTimeParseException("Could not fully parse datetime", str, result.errorIndex()); + } + return temporal; + } + + @Override + public Optional tryParse(CharSequence str) { + return Optional.ofNullable(parser.tryParse(str, timezone).result()); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java b/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java new file mode 100644 index 0000000000000..4f1d131dd8ced --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java @@ -0,0 +1,521 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.core.Nullable; + +import java.time.DateTimeException; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoField; +import java.util.EnumMap; +import java.util.EnumSet; +import java.util.Map; +import java.util.Set; + +/** + * Parses datetimes in ISO8601 format (and subsequences thereof). + *

<p>
+ * This is faster than the generic parsing in {@link java.time.format.DateTimeFormatter}, as this is hard-coded and specific to ISO-8601.
+ * Various public libraries provide their own variant of this mechanism. We use our own for a few reasons:
+ * <ul>
+ *     <li>
+ *         We are historically a bit more lenient with strings that are invalid according to the strict specification
+ *         (eg using a zone region instead of offset for timezone)
+ *     </li>
+ *     <li>Various built-in formats specify some fields as mandatory and some as optional</li>
+ *     <li>Callers can specify defaults for fields that are not present (eg for roundup parsers)</li>
+ * </ul>
+ * We also do not use exceptions here, instead returning {@code null} for any invalid values, that are then + * checked and propagated as appropriate. + */ +class Iso8601Parser { + + /** + * The result of the parse. If successful, {@code result} will be non-null. + * If parse failed, {@code errorIndex} specifies the index into the parsed string + * that the first invalid data was encountered. + */ + record Result(@Nullable DateTime result, int errorIndex) { + Result(DateTime result) { + this(result, -1); + } + + static Result error(int errorIndex) { + return new Result(null, errorIndex); + } + } + + private static final Set VALID_MANDATORY_FIELDS = EnumSet.of( + ChronoField.YEAR, + ChronoField.MONTH_OF_YEAR, + ChronoField.DAY_OF_MONTH, + ChronoField.HOUR_OF_DAY, + ChronoField.MINUTE_OF_HOUR, + ChronoField.SECOND_OF_MINUTE + ); + + private static final Set VALID_DEFAULT_FIELDS = EnumSet.of( + ChronoField.MONTH_OF_YEAR, + ChronoField.DAY_OF_MONTH, + ChronoField.HOUR_OF_DAY, + ChronoField.MINUTE_OF_HOUR, + ChronoField.SECOND_OF_MINUTE, + ChronoField.NANO_OF_SECOND + ); + + private final Set mandatoryFields; + private final boolean optionalTime; + private final Map defaults; + + /** + * Constructs a new {@code Iso8601Parser} object + * + * @param mandatoryFields + * The set of fields that must be present for a valid parse. These should be specified in field order + * (eg if {@link ChronoField#DAY_OF_MONTH} is specified, {@link ChronoField#MONTH_OF_YEAR} should also be specified). + * {@link ChronoField#YEAR} is always mandatory. + * @param optionalTime + * {@code false} if the presence of time fields follows {@code mandatoryFields}, + * {@code true} if a time component is always optional, despite the presence of time fields in {@code mandatoryFields}. + * This makes it possible to specify 'time is optional, but if it is present, it must have these fields' + * by settings {@code optionalTime = true} and putting time fields such as {@link ChronoField#HOUR_OF_DAY} + * and {@link ChronoField#MINUTE_OF_HOUR} in {@code mandatoryFields}. + * @param defaults + * Map of default field values, if they are not present in the parsed string. + */ + Iso8601Parser(Set mandatoryFields, boolean optionalTime, Map defaults) { + checkChronoFields(mandatoryFields, VALID_MANDATORY_FIELDS); + checkChronoFields(defaults.keySet(), VALID_DEFAULT_FIELDS); + + this.mandatoryFields = EnumSet.of(ChronoField.YEAR); // year is always mandatory + this.mandatoryFields.addAll(mandatoryFields); + this.optionalTime = optionalTime; + this.defaults = defaults.isEmpty() ? Map.of() : new EnumMap<>(defaults); + } + + private static void checkChronoFields(Set fields, Set validFields) { + if (fields.isEmpty()) return; // nothing to check + + fields = EnumSet.copyOf(fields); + fields.removeAll(validFields); + if (fields.isEmpty() == false) { + throw new IllegalArgumentException("Invalid chrono fields specified " + fields); + } + } + + boolean optionalTime() { + return optionalTime; + } + + Set mandatoryFields() { + return mandatoryFields; + } + + private boolean isOptional(ChronoField field) { + return mandatoryFields.contains(field) == false; + } + + private Integer defaultZero(ChronoField field) { + return defaults.getOrDefault(field, 0); + } + + /** + * Attempts to parse {@code str} as an ISO-8601 datetime, returning a {@link Result} indicating if the parse + * was successful or not, and what fields were present. 
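A minimal usage sketch of the parser and its `Result` (same-package access assumed; the input string is made up):

```java
// No mandatory time fields, time component optional, no field defaults.
Iso8601Parser parser = new Iso8601Parser(Set.of(), true, Map.of());
Iso8601Parser.Result result = parser.tryParse("2024-05-09T12:34:56.789Z", null);
if (result.result() != null) {
    // all date and time fields are present, so INSTANT_SECONDS is supported
    long epochSecond = result.result().getLong(ChronoField.INSTANT_SECONDS);
} else {
    int errorAt = result.errorIndex(); // index of the first invalid character
}
```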
+ * @param str The string to parse + * @param defaultTimezone The default timezone to return, if no timezone is present in the string + * @return The {@link Result} of the parse. + */ + Result tryParse(CharSequence str, @Nullable ZoneId defaultTimezone) { + if (str.charAt(0) == '-') { + // the year is negative. This is most unusual. + // Instead of always adding offsets and dynamically calculating position in the main parser code below, + // just in case it starts with a -, just parse the substring, then adjust the output appropriately + Result result = parse(new CharSubSequence(str, 1, str.length()), defaultTimezone); + + if (result.errorIndex() >= 0) { + return Result.error(result.errorIndex() + 1); + } else { + DateTime dt = result.result(); + return new Result( + new DateTime( + -dt.years(), + dt.months(), + dt.days(), + dt.hours(), + dt.minutes(), + dt.seconds(), + dt.nanos(), + dt.zoneId(), + dt.offset() + ) + ); + } + } else { + return parse(str, defaultTimezone); + } + } + + /** + * Index {@code i} is the multiplicand to get the number of nanos from the fractional second with {@code i=9-d} digits. + */ + private static final int[] NANO_MULTIPLICANDS = new int[] { 1, 10, 100, 1_000, 10_000, 100_000, 1_000_000, 10_000_000, 100_000_000 }; + + /** + * Parses {@code str} in ISO8601 format. + * <p>
+ * This parses the string using fixed offsets (it does not support variable-width fields) and separators, + * sequentially parsing each field and looking for the correct separator. + * This enables it to be very fast, as all the fields are in fixed places in the string. + * The only variable aspect comes from the timezone, which (fortunately) is only present at the end of the string, + * at any point after a time field. + * It also does not use exceptions, instead returning {@code null} where a value cannot be parsed. + */ + private Result parse(CharSequence str, @Nullable ZoneId defaultTimezone) { + int len = str.length(); + + // YEARS + Integer years = parseInt(str, 0, 4); + if (years == null) return Result.error(0); + if (len == 4) { + return isOptional(ChronoField.MONTH_OF_YEAR) + ? new Result( + withZoneOffset( + years, + defaults.get(ChronoField.MONTH_OF_YEAR), + defaults.get(ChronoField.DAY_OF_MONTH), + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(4); + } + + if (str.charAt(4) != '-') return Result.error(4); + + // MONTHS + Integer months = parseInt(str, 5, 7); + if (months == null || months > 12) return Result.error(5); + if (len == 7) { + return isOptional(ChronoField.DAY_OF_MONTH) + ? new Result( + withZoneOffset( + years, + months, + defaults.get(ChronoField.DAY_OF_MONTH), + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(7); + } + + if (str.charAt(7) != '-') return Result.error(7); + + // DAYS + Integer days = parseInt(str, 8, 10); + if (days == null || days > 31) return Result.error(8); + if (len == 10) { + return optionalTime || isOptional(ChronoField.HOUR_OF_DAY) + ? new Result( + withZoneOffset( + years, + months, + days, + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(10); + } + + if (str.charAt(10) != 'T') return Result.error(10); + if (len == 11) { + return isOptional(ChronoField.HOUR_OF_DAY) + ? new Result( + withZoneOffset( + years, + months, + days, + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(11); + } + + // HOURS + timezone + Integer hours = parseInt(str, 11, 13); + if (hours == null || hours > 23) return Result.error(11); + if (len == 13) { + return isOptional(ChronoField.MINUTE_OF_HOUR) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + defaultZero(ChronoField.MINUTE_OF_HOUR), + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(13); + } + if (isZoneId(str, 13)) { + ZoneId timezone = parseZoneId(str, 13); + return timezone != null && isOptional(ChronoField.MINUTE_OF_HOUR) + ? 
new Result( + withZoneOffset( + years, + months, + days, + hours, + defaultZero(ChronoField.MINUTE_OF_HOUR), + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + timezone + ) + ) + : Result.error(13); + } + + if (str.charAt(13) != ':') return Result.error(13); + + // MINUTES + timezone + Integer minutes = parseInt(str, 14, 16); + if (minutes == null || minutes > 59) return Result.error(14); + if (len == 16) { + return isOptional(ChronoField.SECOND_OF_MINUTE) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + minutes, + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(16); + } + if (isZoneId(str, 16)) { + ZoneId timezone = parseZoneId(str, 16); + return timezone != null && isOptional(ChronoField.SECOND_OF_MINUTE) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + minutes, + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + timezone + ) + ) + : Result.error(16); + } + + if (str.charAt(16) != ':') return Result.error(16); + + // SECONDS + timezone + Integer seconds = parseInt(str, 17, 19); + if (seconds == null || seconds > 59) return Result.error(17); + if (len == 19) { + return new Result( + withZoneOffset(years, months, days, hours, minutes, seconds, defaultZero(ChronoField.NANO_OF_SECOND), defaultTimezone) + ); + } + if (isZoneId(str, 19)) { + ZoneId timezone = parseZoneId(str, 19); + return timezone != null + ? new Result( + withZoneOffset(years, months, days, hours, minutes, seconds, defaultZero(ChronoField.NANO_OF_SECOND), timezone) + ) + : Result.error(19); + } + + char decSeparator = str.charAt(19); + if (decSeparator != '.' && decSeparator != ',') return Result.error(19); + + // NANOS + timezone + // nanos are always optional + // the last number could be millis or nanos, or any combination in the middle + // so we keep parsing numbers until we get to not a number + int nanos = 0; + int pos; + for (pos = 20; pos < len && pos < 29; pos++) { + char c = str.charAt(pos); + if (c < ZERO || c > NINE) break; + nanos = nanos * 10 + (c - ZERO); + } + + if (pos == 20) return Result.error(20); // didn't find a number at all + + // multiply it by the correct multiplicand to get the nanos + nanos *= NANO_MULTIPLICANDS[29 - pos]; + + if (len == pos) { + return new Result(withZoneOffset(years, months, days, hours, minutes, seconds, nanos, defaultTimezone)); + } + if (isZoneId(str, pos)) { + ZoneId timezone = parseZoneId(str, pos); + return timezone != null + ? new Result(withZoneOffset(years, months, days, hours, minutes, seconds, nanos, timezone)) + : Result.error(pos); + } + + // still chars left at the end - string is not valid + return Result.error(pos); + } + + private static boolean isZoneId(CharSequence str, int pos) { + // all region zoneIds must start with [A-Za-z] (see ZoneId#of) + // this also covers Z and UT/UTC/GMT zone variants + char c = str.charAt(pos); + return c == '+' || c == '-' || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'); + } + + /** + * This parses the zone offset, which is of the format accepted by {@link java.time.ZoneId#of(String)}. + * It has fast paths for numerical offsets, but falls back on {@code ZoneId.of} for non-trivial zone ids. 
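The numeric fast path amounts to something like this simplified sketch (validation and the no-colon and seconds-precision variants handled by the real method are omitted):

```java
// Convert "+HH:MM" to a ZoneOffset without going through ZoneId.of's parser.
static ZoneOffset numericOffset(String s) {
    boolean positive = s.charAt(0) == '+';
    int hours = Integer.parseInt(s, 1, 3, 10);   // "05" in "+05:30"
    int minutes = Integer.parseInt(s, 4, 6, 10); // "30" in "+05:30"
    int total = hours * 3600 + minutes * 60;
    return ZoneOffset.ofTotalSeconds(positive ? total : -total);
}
```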
+ */ + private ZoneId parseZoneId(CharSequence str, int pos) { + int len = str.length(); + char first = str.charAt(pos); + + if (first == 'Z' && len == pos + 1) { + return ZoneOffset.UTC; + } + + boolean positive; + switch (first) { + case '+' -> positive = true; + case '-' -> positive = false; + default -> { + // non-trivial zone offset, fallback on the built-in java zoneid parser + try { + return ZoneId.of(str.subSequence(pos, str.length()).toString()); + } catch (DateTimeException e) { + return null; + } + } + } + pos++; // read the + or - + + Integer hours = parseInt(str, pos, pos += 2); + if (hours == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, 0, 0, positive); + + boolean hasColon = false; + if (str.charAt(pos) == ':') { + pos++; + hasColon = true; + } + + Integer minutes = parseInt(str, pos, pos += 2); + if (minutes == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, minutes, 0, positive); + + // either both dividers have a colon, or neither do + if ((str.charAt(pos) == ':') != hasColon) return null; + if (hasColon) { + pos++; + } + + Integer seconds = parseInt(str, pos, pos += 2); + if (seconds == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, minutes, seconds, positive); + + // there's some text left over... + return null; + } + + /* + * ZoneOffset.ofTotalSeconds has a ConcurrentHashMap cache of offsets. This is fine, + * but it does mean there's an expensive map lookup every time we call ofTotalSeconds. + * There's no way to get round that, but we can at least have a very quick last-value cache here + * to avoid doing a full map lookup when there's lots of timestamps with the same offset being parsed + */ + private final ThreadLocal<ZoneOffset> lastOffset = ThreadLocal.withInitial(() -> ZoneOffset.UTC); + + private ZoneOffset ofHoursMinutesSeconds(int hours, int minutes, int seconds, boolean positive) { + int totalSeconds = hours * 3600 + minutes * 60 + seconds; + if (positive == false) { + totalSeconds = -totalSeconds; + } + + // check the lastOffset value + ZoneOffset lastOffset = this.lastOffset.get(); + if (totalSeconds == lastOffset.getTotalSeconds()) { + return lastOffset; + } + + try { + ZoneOffset offset = ZoneOffset.ofTotalSeconds(totalSeconds); + this.lastOffset.set(offset); // cache the newly-created offset, not the stale value + return offset; + } catch (DateTimeException e) { + // zoneoffset is out of range + return null; + } + } + + /** + * Create a {@code DateTime} object, with the ZoneOffset field set when the zone is an offset, not just an id. 
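The offset/id distinction this relies on is plain `java.time` behaviour:

```java
ZoneId fixed = ZoneId.of("+02:00");        // actually returns a ZoneOffset
ZoneId region = ZoneId.of("Europe/Paris"); // region id, offset resolved later
assert fixed instanceof ZoneOffset;
assert (region instanceof ZoneOffset) == false;
```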
+ */ + private static DateTime withZoneOffset( + int years, + Integer months, + Integer days, + Integer hours, + Integer minutes, + Integer seconds, + Integer nanos, + ZoneId zoneId + ) { + if (zoneId instanceof ZoneOffset zo) { + return new DateTime(years, months, days, hours, minutes, seconds, nanos, zoneId, zo); + } else { + return new DateTime(years, months, days, hours, minutes, seconds, nanos, zoneId, null); + } + } + + private static final char ZERO = '0'; + private static final char NINE = '9'; + + private static Integer parseInt(CharSequence str, int startInclusive, int endExclusive) { + if (str.length() < endExclusive) return null; + + int result = 0; + for (int i = startInclusive; i < endExclusive; i++) { + char c = str.charAt(i); + if (c < ZERO || c > NINE) return null; + result = result * 10 + (c - ZERO); + } + return result; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index 9c39ee51276d7..707b07c1d68d9 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -21,15 +21,21 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.function.UnaryOperator; +import static java.util.Map.entry; + class JavaDateFormatter implements DateFormatter { @SuppressWarnings("unchecked") private static T defaultRoundUp(T parser) { if (parser instanceof JavaTimeDateTimeParser jtp) { return (T) defaultRoundUp(jtp); } + if (parser instanceof Iso8601DateTimeParser iso) { + return (T) defaultRoundUp(iso); + } throw new IllegalArgumentException("Unknown parser implementation " + parser.getClass()); } @@ -78,6 +84,19 @@ private static JavaTimeDateTimeParser defaultRoundUp(JavaTimeDateTimeParser pars return new JavaTimeDateTimeParser(builder.toFormatter(parser.getLocale())); } + private static Iso8601DateTimeParser defaultRoundUp(Iso8601DateTimeParser parser) { + return parser.withDefaults( + Map.ofEntries( + entry(ChronoField.MONTH_OF_YEAR, 1), + entry(ChronoField.DAY_OF_MONTH, 1), + entry(ChronoField.HOUR_OF_DAY, 23), + entry(ChronoField.MINUTE_OF_HOUR, 59), + entry(ChronoField.SECOND_OF_MINUTE, 59), + entry(ChronoField.NANO_OF_SECOND, 999_999_999) + ) + ); + } + private final String format; private final DateTimePrinter printer; private final DateTimeParser[] parsers; diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java index 697c5eff939f9..941e034a83dea 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java @@ -39,9 +39,7 @@ public HealthInfo(StreamInput input) throws IOException { input.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0) ? input.readOptionalWriteable(DataStreamLifecycleHealthInfo::new) : null, - input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) - ? input.readMap(RepositoriesHealthInfo::new) - : Map.of() + input.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
input.readMap(RepositoriesHealthInfo::new) : Map.of() ); } @@ -51,7 +49,7 @@ public void writeTo(StreamOutput output) throws IOException { if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { output.writeOptionalWriteable(dslHealthInfo); } - if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { output.writeMap(repositoriesInfoByNode, StreamOutput::writeWriteable); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index f780de078527a..b0dc5958c7ed0 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -71,7 +71,7 @@ public Request(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { this.diskHealthInfo = in.readOptionalWriteable(DiskHealthInfo::new); this.dslHealthInfo = in.readOptionalWriteable(DataStreamLifecycleHealthInfo::new); - this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) + this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readOptionalWriteable(RepositoriesHealthInfo::new) : null; } else { @@ -113,13 +113,13 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeOptionalWriteable(diskHealthInfo); out.writeOptionalWriteable(dslHealthInfo); - if (out.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalWriteable(repositoriesHealthInfo); } } else { // BWC for pre-8.12 the disk health info was mandatory. 
Evolving this request has proven tricky however we've made use of - // waiting for all nodes to be on the {@link TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS} transport version - // before sending any requests to update the health info that'd break the pre HEALTH_INFO_ENRICHED_WITH_DSL_STATUS + // waiting for all nodes to be on the {@link TransportVersions.V_8_12_0} transport version + // before sending any requests to update the health info that'd break the pre-8.12 // transport invariant of always having a disk health information in the request diskHealthInfo.writeTo(out); } diff --git a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index 8b087f5a302db..047c38138fda0 100644 --- a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -95,6 +95,18 @@ public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh } } + @Override + public void afterIndexShardClosing(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { + for (IndexEventListener listener : listeners) { + try { + listener.afterIndexShardClosing(shardId, indexShard, indexSettings); + } catch (Exception e) { + logger.warn(() -> "[" + shardId.getId() + "] failed to invoke after shard closing callback", e); + throw e; + } + } + } + @Override public void afterIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { for (IndexEventListener listener : listeners) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index eb0672f7ad122..88db674c3ec2f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -649,6 +649,7 @@ public void onFailure(Exception e) { onResponse(null); // otherwise ignore the exception } }, l -> indexShard.close(reason, flushEngine, closeExecutor, l)); + listener.afterIndexShardClosing(sId, indexShard, indexSettings); } } } finally { diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index e67196c9090c9..bbf7cc3e0e1e9 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; @@ -31,20 +32,20 @@ public interface IndexSettingProvider { * @param indexName The name of the new index being created * @param dataStreamName The name of the data stream if the index being created is part of a data stream otherwise * null - * @param timeSeries Whether the template is in time series mode. + * @param isTimeSeries Whether the template is in time series mode. * @param metadata The current metadata instance that doesn't yet contain the index to be created * @param resolvedAt The time the request to create this new index was accepted. 
- * @param allSettings All the setting resolved from the template that matches and any setting defined on the create index - * request + * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings + * defined on the create index request * @param combinedTemplateMappings All the mappings resolved from the template that matches */ Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ); diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 78f07c8a137b9..f076ee0be5540 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -105,6 +105,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java index 0e6f117266e35..c3eb0c4c0290a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java @@ -58,4 +58,9 @@ public void write(XContentBuilder b) throws IOException { writeValue(b, values.binaryValue()); } + + @Override + public String fieldName() { + return name; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java index af341e64661d1..a7283cf0a28ec 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java @@ -178,5 +178,10 @@ public void write(XContentBuilder b) throws IOException { } b.field(NAME, postings.freq()); } + + @Override + public String fieldName() { + return NAME; + } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 9476c3e719e0b..08421af332fe4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -838,7 +838,15 @@ protected String contentType() { private static class NoOpObjectMapper extends ObjectMapper { NoOpObjectMapper(String name, String fullPath) { - super(name, fullPath, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, Dynamic.RUNTIME, Collections.emptyMap()); + super( + name, + fullPath, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, + Dynamic.RUNTIME, + Collections.emptyMap() + ); 
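To illustrate the renamed `IndexSettingProvider` signature above, a hypothetical provider might look like this sketch (class name, setting key, and fallback value are all made up):

```java
public class ExampleSettingProvider implements IndexSettingProvider {
    @Override
    public Settings getAdditionalIndexSettings(
        String indexName,
        @Nullable String dataStreamName,
        boolean isTimeSeries,
        Metadata metadata,
        Instant resolvedAt,
        Settings indexTemplateAndCreateRequestSettings,
        List<CompressedXContent> combinedTemplateMappings
    ) {
        // Only contribute a codec when neither the matching template nor the
        // create request specified one.
        if (indexTemplateAndCreateRequestSettings.hasValue("index.codec") == false) {
            return Settings.builder().put("index.codec", "best_compression").build();
        }
        return Settings.EMPTY;
    }
}
```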
} @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index b5de3971fa091..acfe0fcfbf5bd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -21,6 +21,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.Map; +import java.util.stream.Stream; /** * Wrapper around everything that defines a mapping, without references to @@ -125,7 +126,8 @@ private boolean isSourceSynthetic() { } public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - return root.syntheticFieldLoader(Arrays.stream(metadataMappers)); + var stream = Stream.concat(Stream.of(metadataMappers), root.mappers.values().stream()); + return root.syntheticFieldLoader(stream); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 5c2880a4bf760..a8955e46f0ad4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -149,7 +149,7 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { String nestedTypePath, Query nestedTypeFilter ) { - super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.nestedTypePath = nestedTypePath; this.nestedTypeFilter = nestedTypeFilter; this.includeInParent = includeInParent; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 6d5a43ae41bd0..6336e6ca0b764 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -37,10 +37,12 @@ public class ObjectMapper extends Mapper { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); public static final String CONTENT_TYPE = "object"; + static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; public static class Defaults { public static final boolean ENABLED = true; public static final Explicit SUBOBJECTS = Explicit.IMPLICIT_TRUE; + public static final Explicit TRACK_ARRAY_SOURCE = Explicit.IMPLICIT_FALSE; public static final Dynamic DYNAMIC = Dynamic.TRUE; } @@ -78,6 +80,7 @@ static Dynamic getRootDynamic(MappingLookup mappingLookup) { public static class Builder extends Mapper.Builder { protected final Explicit subobjects; protected Explicit enabled = Explicit.IMPLICIT_TRUE; + protected Explicit trackArraySource = Defaults.TRACK_ARRAY_SOURCE; protected Dynamic dynamic; protected final List mappersBuilders = new ArrayList<>(); @@ -91,6 +94,11 @@ public Builder enabled(boolean enabled) { return this; } + public Builder trackArraySource(boolean value) { + this.trackArraySource = Explicit.explicitBoolean(value); + return this; + } + public Builder dynamic(Dynamic dynamic) { this.dynamic = dynamic; return this; @@ -182,6 +190,7 @@ public ObjectMapper build(MapperBuilderContext context) { context.buildFullName(name()), enabled, subobjects, + trackArraySource, dynamic, buildMappers(context.createChildContext(name(), dynamic)) ); @@ -242,6 +251,9 @@ protected static boolean 
parseObjectOrDocumentTypeProperties( } else if (fieldName.equals("enabled")) { builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".enabled")); return true; + } else if (fieldName.equals(STORE_ARRAY_SOURCE_PARAM)) { + builder.trackArraySource(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".track_array_source")); + return true; } else if (fieldName.equals("properties")) { if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) { // nothing to do here, empty (to support "properties: []" case) @@ -369,6 +381,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate protected final Explicit enabled; protected final Explicit subobjects; + protected final Explicit trackArraySource; protected final Dynamic dynamic; protected final Map mappers; @@ -378,6 +391,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate String fullPath, Explicit enabled, Explicit subobjects, + Explicit trackArraySource, Dynamic dynamic, Map mappers ) { @@ -387,6 +401,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate this.fullPath = internFieldName(fullPath); this.enabled = enabled; this.subobjects = subobjects; + this.trackArraySource = trackArraySource; this.dynamic = dynamic; if (mappers == null) { this.mappers = Map.of(); @@ -412,7 +427,7 @@ public ObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { * This is typically used in the context of a mapper merge when there's not enough budget to add the entire object. */ ObjectMapper withoutMappers() { - return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, dynamic, Map.of()); + return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, trackArraySource, dynamic, Map.of()); } @Override @@ -454,6 +469,10 @@ public final boolean subobjects() { return subobjects.value(); } + public final boolean trackArraySource() { + return trackArraySource.value(); + } + @Override public void validate(MappingLookup mappers) { for (Mapper mapper : this.mappers.values()) { @@ -480,6 +499,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex fullPath, mergeResult.enabled, mergeResult.subObjects, + mergeResult.trackArraySource, mergeResult.dynamic, mergeResult.mappers ); @@ -488,6 +508,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex protected record MergeResult( Explicit enabled, Explicit subObjects, + Explicit trackArraySource, ObjectMapper.Dynamic dynamic, Map mappers ) { @@ -519,11 +540,26 @@ static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, Ma } else { subObjects = existing.subobjects; } + final Explicit trackArraySource; + if (mergeWithObject.trackArraySource.explicit()) { + if (reason == MergeReason.INDEX_TEMPLATE) { + trackArraySource = mergeWithObject.trackArraySource; + } else if (existing.trackArraySource != mergeWithObject.trackArraySource) { + throw new MapperException( + "the [track_array_source] parameter can't be updated for the object mapping [" + existing.name() + "]" + ); + } else { + trackArraySource = existing.trackArraySource; + } + } else { + trackArraySource = existing.trackArraySource; + } MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects.value()); return new MergeResult( enabled, subObjects, + trackArraySource, 
mergeWithObject.dynamic != null ? mergeWithObject.dynamic : existing.dynamic, mergedMappers ); @@ -680,6 +716,9 @@ void toXContent(XContentBuilder builder, Params params, ToXContent custom) throw if (subobjects != Defaults.SUBOBJECTS) { builder.field("subobjects", subobjects.value()); } + if (trackArraySource != Defaults.TRACK_ARRAY_SOURCE) { + builder.field(STORE_ARRAY_SOURCE_PARAM, trackArraySource.value()); + } if (custom != null) { custom.toXContent(builder, params); } @@ -712,19 +751,17 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep } - public SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream extra) { - return new SyntheticSourceFieldLoader( - Stream.concat(extra, mappers.values().stream()) - .sorted(Comparator.comparing(Mapper::name)) - .map(Mapper::syntheticFieldLoader) - .filter(l -> l != null) - .toList() - ); + public SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream mappers) { + var fields = mappers.sorted(Comparator.comparing(Mapper::name)) + .map(Mapper::syntheticFieldLoader) + .filter(l -> l != SourceLoader.SyntheticFieldLoader.NOTHING) + .toList(); + return new SyntheticSourceFieldLoader(fields); } @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - return syntheticFieldLoader(Stream.empty()); + return syntheticFieldLoader(mappers.values().stream()); } private class SyntheticSourceFieldLoader implements SourceLoader.SyntheticFieldLoader { @@ -830,6 +867,11 @@ public boolean setIgnoredValues(Map timeSeriesDimensionSubFields ) { // Subobjects are not currently supported. - super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 9e0680e6e6e6a..c19809760ec43 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -117,6 +117,7 @@ public RootObjectMapper build(MapperBuilderContext context) { name(), enabled, subobjects, + trackArraySource, dynamic, mappers, new HashMap<>(runtimeFields), @@ -262,6 +263,7 @@ private static boolean isConflictingObject(Mapper mapper, String[] parts) { String name, Explicit enabled, Explicit subobjects, + Explicit trackArraySource, Dynamic dynamic, Map mappers, Map runtimeFields, @@ -270,7 +272,7 @@ private static boolean isConflictingObject(Mapper mapper, String[] parts) { Explicit dateDetection, Explicit numericDetection ) { - super(name, name, enabled, subobjects, dynamic, mappers); + super(name, name, enabled, subobjects, trackArraySource, dynamic, mappers); this.runtimeFields = runtimeFields; this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; @@ -292,6 +294,7 @@ RootObjectMapper withoutMappers() { simpleName(), enabled, subobjects, + trackArraySource, dynamic, Map.of(), Map.of(), @@ -407,6 +410,7 @@ public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeCo simpleName(), mergeResult.enabled(), mergeResult.subObjects(), + mergeResult.trackArraySource(), mergeResult.dynamic(), mergeResult.mappers(), Map.copyOf(runtimeFields), diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/SortedNumericDocValuesSyntheticFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SortedNumericDocValuesSyntheticFieldLoader.java index c3ebe079e886e..96ba151472a03 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SortedNumericDocValuesSyntheticFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SortedNumericDocValuesSyntheticFieldLoader.java @@ -232,4 +232,9 @@ public static SortedNumericDocValues docValuesOrNull(LeafReader reader, String f } return null; } + + @Override + public String fieldName() { + return name; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoader.java index 37b6fe72c3089..335e551365931 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoader.java @@ -69,6 +69,11 @@ public SortedSetDocValuesSyntheticFieldLoader( : IgnoreMalformedStoredValues.empty(); } + @Override + public String fieldName() { + return name; + } + @Override public Stream> storedFieldLoaders() { if (storedValuesName == null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 233faf462400b..1b6d6dd1141f4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -134,10 +134,11 @@ public static class Builder extends MetadataFieldMapper.Builder { private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.indexMode = indexMode; - this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false + || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); } public Builder setSynthetic() { @@ -212,7 +213,11 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? 
TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) + c -> new Builder( + c.getIndexSettings().getMode(), + c.getSettings(), + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + ) ); static final class SourceFieldType extends MappedFieldType { @@ -347,7 +352,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY).init(this); + return new Builder(indexMode, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java index d8879338bea1e..dea3494f408d9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java @@ -215,6 +215,11 @@ public boolean hasValue() { @Override public void write(XContentBuilder b) {} + + @Override + public String fieldName() { + return ""; + } }; /** @@ -242,10 +247,20 @@ public void write(XContentBuilder b) {} */ void write(XContentBuilder b) throws IOException; + /** + * Allows for identifying and tracking additional field values to include in the field source. + * @param objectsWithIgnoredFields maps object names to lists of fields they contain with special source handling + * @return true if any matching fields are identified + */ default boolean setIgnoredValues(Map> objectsWithIgnoredFields) { return false; } + /** + * Returns the canonical field name for this loader. + */ + String fieldName(); + /** * Sync for stored field values. */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java index 6ae7c5f20233e..b26aed11233f9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java @@ -89,4 +89,9 @@ public final void write(XContentBuilder b) throws IOException { public final DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) throws IOException { return null; } + + @Override + public String fieldName() { + return name; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 5159a76206ef6..9ecd68ec27803 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1671,6 +1671,11 @@ public void write(XContentBuilder b) throws IOException { } b.endArray(); } + + @Override + public String fieldName() { + return name(); + } } private class DocValuesSyntheticFieldLoader implements SourceLoader.SyntheticFieldLoader { @@ -1721,5 +1726,10 @@ public void write(XContentBuilder b) throws IOException { } b.endArray(); } + + @Override + public String fieldName() { + return name(); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java index 5bea31d2d204d..b27a275889751 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java +++ 
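Several loaders above gain a `fieldName()` accessor so that object-level synthetic-source loaders can route stored and ignored values to the loader that owns them. A minimal sketch of the contract, with `SyntheticLoader` as a trimmed stand-in for `SourceLoader.SyntheticFieldLoader` (a plain `StringBuilder` stands in for `XContentBuilder`):

```java
import java.util.ArrayList;
import java.util.List;

// Trimmed stand-in interface: just the pieces needed to show why fieldName() exists.
interface SyntheticLoader {
    /** Canonical name of the field this loader reconstructs. */
    String fieldName();

    /** Appends the reconstructed value, if there is one. */
    void write(StringBuilder out);
}

final class KeywordLikeLoader implements SyntheticLoader {
    private final String name;
    private final List<String> values = new ArrayList<>();

    KeywordLikeLoader(String name) {
        this.name = name;
    }

    void addValue(String value) {
        values.add(value);
    }

    @Override
    public String fieldName() {
        // object-level loaders use this to route ignored/stored values
        // to the loader that owns the field
        return name;
    }

    @Override
    public void write(StringBuilder out) {
        if (values.isEmpty() == false) {
            out.append('"').append(name).append("\":").append(values);
        }
    }
}
```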
b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java @@ -55,6 +55,13 @@ default void afterIndexShardStarted(IndexShard indexShard) {} */ default void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {} + /** + * Called after the index shard has been marked closed. It could still be waiting for the async close of the engine. + * The ordering between this and the subsequent state notifications (closed, deleted, store closed) is + * not guaranteed. + */ + default void afterIndexShardClosing(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {} + /** * Called after the index shard has been closed. * diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 5c1381f730013..c29e248b1a689 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -293,7 +293,8 @@ public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { if (parent instanceof ESToParentBlockJoinQuery) { hasUnknownLeaf[0] = true; } - return super.getSubVisitor(occur, parent); + // we want to visit all queries, including those within the must_not clauses. + return this; } }); return hasUnknownLeaf[0]; diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 14e8ce80fcf26..9585711b5562e 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -257,7 +257,7 @@ static NodeConstruction prepareConstruction( ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); SettingsModule settingsModule = constructor.validateSettings(initialEnvironment.settings(), settings, threadPool); - SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool); + SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool, telemetryProvider); constructor.createClientAndRegistries(settingsModule.getSettings(), threadPool, searchModule); DocumentParsingProvider documentParsingProvider = constructor.getDocumentParsingProvider(); @@ -525,9 +525,9 @@ private SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } - private SearchModule createSearchModule(Settings settings, ThreadPool threadPool) { + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); - return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList()); + return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); } /** diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index 44e86e056ef3b..7ab682d3143e7 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ 
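The highlighter change relies on a detail of Lucene's `QueryVisitor`: the default `getSubVisitor` returns `QueryVisitor.EMPTY_VISITOR` for `MUST_NOT` clauses, so a visitor that needs to see every leaf must return itself instead. A small standalone visitor illustrating the same idea (the leaf counting is illustrative, not the PR's logic):

```java
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;

// Counts every leaf query in a tree. Without the getSubVisitor override,
// leaves under MUST_NOT clauses would be skipped, because the default
// implementation returns QueryVisitor.EMPTY_VISITOR for them.
final class AllClausesVisitor extends QueryVisitor {
    int leafCount = 0;

    @Override
    public void visitLeaf(Query query) {
        leafCount++;
    }

    @Override
    public void consumeTerms(Query query, Term... terms) {
        leafCount++; // term queries report through consumeTerms, not visitLeaf
    }

    @Override
    public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) {
        return this; // descend into every clause, including must_not
    }
}
// usage: query.visit(new AllClausesVisitor()) walks the whole tree, negations included
```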
b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -53,7 +53,9 @@ public static class Request extends MasterNodeRequest { private String localAbortReason; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -64,6 +66,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, long allocationId, Exception exception, String localAbortReason) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.exception = exception; this.allocationId = allocationId; diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index 1fbdd03dcc268..26cf0658f60b9 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -41,7 +41,9 @@ public static class Request extends MasterNodeRequest { private String taskId; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -49,6 +51,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; } diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 299891c64711a..ce0e46e7b0425 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -51,7 +51,9 @@ public static class Request extends MasterNodeRequest { private PersistentTaskParams params; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -61,6 +63,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, String taskName, PersistentTaskParams params) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.taskName = taskName; this.params = params; diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index dcf86f85eb709..6ecefa1bbf847 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -45,7 +45,9 @@ public static class Request extends MasterNodeRequest { private long allocationId = -1L; private PersistentTaskState state; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -55,6 +57,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, long allocationId, PersistentTaskState state) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.allocationId = allocationId; this.state = state; diff --git 
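The persistent-task requests above all switch from an implicit default to an explicitly passed master-node timeout. A self-contained sketch of the pattern, with stand-ins for `MasterNodeRequest` and `TimeValue` (the constant name mirrors `TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT`, but the types here are simplified):

```java
import java.util.concurrent.TimeUnit;

// Stand-ins for MasterNodeRequest/TimeValue: the point is that the timeout is
// now always passed explicitly up the constructor chain.
abstract class TimedMasterRequest {
    final long masterTimeoutMillis;

    TimedMasterRequest(long masterTimeoutMillis) {
        this.masterTimeoutMillis = masterTimeoutMillis;
    }
}

final class RemoveTaskRequest extends TimedMasterRequest {
    // mirrors TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT: the historical 30s
    // default, named loudly so remaining call sites are easy to audit later
    static final long TRAPPY_DEFAULT_MILLIS = TimeUnit.SECONDS.toMillis(30);

    final String taskId;

    RemoveTaskRequest(String taskId) {
        super(TRAPPY_DEFAULT_MILLIS); // explicit, even while keeping the old value
        this.taskId = taskId;
    }
}
```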
a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 535758ed71eac..1ac42a91736c3 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; @@ -80,6 +81,13 @@ ActionListener listener() { } protected ClusterState execute(final ClusterState currentState) { + if (currentState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // If cluster state has become blocked, this task was submitted while the node was master but is now not master. + // The new master will re-read file settings, so whatever update was to be written here will be handled + // by the new master. + return currentState; + } + ReservedStateMetadata existingMetadata = currentState.metadata().reservedStateMetadata().get(namespace); Map reservedState = stateChunk.state(); ReservedStateVersion reservedStateVersion = stateChunk.metadata(); diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 70801cdef560b..b142e4d567c04 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -76,13 +76,18 @@ public final long getUsageCount() { @Override public abstract List routes(); + private static final Set ALWAYS_SUPPORTED = Set.of("format", "filter_path", "pretty", "human"); + @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { // check if the query has any parameters that are not in the supported set (if declared) Set supported = supportedQueryParameters(); - if (supported != null && supported.containsAll(request.params().keySet()) == false) { - Set unsupported = Sets.difference(request.params().keySet(), supported); - throw new IllegalArgumentException(unrecognized(request, unsupported, supported, "parameter")); + if (supported != null) { + var allSupported = Sets.union(ALWAYS_SUPPORTED, supported); + if (allSupported.containsAll(request.params().keySet()) == false) { + Set unsupported = Sets.difference(request.params().keySet(), allSupported); + throw new IllegalArgumentException(unrecognized(request, unsupported, allSupported, "parameter")); + } } // prepare the request for execution; has the side effect of touching the request parameters diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java index 73b788d63b2ab..111204fbe7fb8 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -12,10 +12,17 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import 
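The `BaseRestHandler` change above merges a handler's declared parameters with a small always-allowed set before rejecting unknown ones. A plain-Java sketch of that check (using `java.util` sets instead of the Elasticsearch `Sets` utility):

```java
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Declared parameters are checked against the handler's supported set plus a
// small always-allowed set of response-shaping parameters.
final class ParamCheck {
    private static final Set<String> ALWAYS_SUPPORTED = Set.of("format", "filter_path", "pretty", "human");

    static void validate(Map<String, String> params, Set<String> supported) {
        Set<String> allSupported = new HashSet<>(ALWAYS_SUPPORTED);
        allSupported.addAll(supported);
        Set<String> unsupported = new HashSet<>(params.keySet());
        unsupported.removeAll(allSupported);
        if (unsupported.isEmpty() == false) {
            throw new IllegalArgumentException("unrecognized parameters: " + unsupported);
        }
    }
}
```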
org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import java.util.Map; +import java.util.Set; public class RestFeatures implements FeatureSpecification { + @Override + public Set getFeatures() { + return Set.of(RestNodesCapabilitiesAction.CAPABILITIES_ACTION); + } + @Override public Map getHistoricalFeatures() { return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java index 9b89a6a932dd3..fae7903d02b82 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -26,6 +27,8 @@ @ServerlessScope(Scope.INTERNAL) public class RestNodesCapabilitiesAction extends BaseRestHandler { + public static final NodeFeature CAPABILITIES_ACTION = new NodeFeature("rest.capabilities_action"); + @Override public List routes() { return List.of(new Route(RestRequest.Method.GET, "/_capabilities")); diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 97b747c650c1b..8d5fa0a7ac155 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -254,6 +254,7 @@ import org.elasticsearch.search.vectors.KnnScoreDocQueryBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -289,6 +290,11 @@ public class SearchModule { Setting.Property.NodeScope ); + /** + * Telemetry provider used to register and report aggregation usage metrics. + */ + private final TelemetryProvider telemetryProvider; + private final Map highlighters; private final List fetchSubPhases = new ArrayList<>(); @@ -306,7 +312,19 @@ public class SearchModule { * @param plugins List of included {@link SearchPlugin} objects. */ public SearchModule(Settings settings, List plugins) { + this(settings, plugins, TelemetryProvider.NOOP); + } + + /** + * Constructs a new SearchModule object + * + * @param settings Current settings + * @param plugins List of included {@link SearchPlugin} objects.
+ * @param telemetryProvider Provider of the meter registry used to record aggregations usage metrics + */ + public SearchModule(Settings settings, List plugins, TelemetryProvider telemetryProvider) { this.settings = settings; + this.telemetryProvider = telemetryProvider; registerSuggesters(plugins); highlighters = setupHighlighters(settings, plugins); registerScoreFunctions(plugins); @@ -352,7 +370,7 @@ public Map getHighlighters() { } private ValuesSourceRegistry registerAggregations(List plugins) { - ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(); + ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(telemetryProvider.getMeterRegistry()); registerAggregation( new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder.PARSER).addResultReader( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index f29850a306b75..e75b2d2002b0f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -236,11 +236,7 @@ public int hashCode() { } boolean versionSupportsDownsamplingTimezone(TransportVersion version) { - return version.onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ) - || version.between( - TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH, - TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED - ); + return version.onOrAfter(TransportVersions.V_8_13_0) || version.isPatchFrom(TransportVersions.V_8_12_1); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index f8e7f3cf3a69c..91bb4c3f0cd74 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -29,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; /** * Result of the significant terms aggregation. @@ -208,10 +210,27 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont reduceContext.bigArrays() ); + private InternalAggregation referenceAgg = null; + @Override public void accept(InternalAggregation aggregation) { + /* + canLeadReduction here is essentially checking if this shard returned data. Unmapped shards (that didn't + specify a missing value) will be false. Since they didn't return data, we can safely skip them, and + doing so prevents us from accidentally taking one as the reference agg for type checking, which would cause + shards that actually returned data to fail.
+ */ + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") final InternalSignificantTerms terms = (InternalSignificantTerms) aggregation; + if (referenceAgg == null) { + referenceAgg = terms; + } else if (referenceAgg.getClass().equals(terms.getClass()) == false) { + // We got here because shards had different mappings for the same field (presumably different indices) + throw AggregationErrors.reduceTypeMismatch(referenceAgg.getName(), Optional.empty()); + } // Compute the overall result set size and the corpus size using the // top-level Aggregations from each shard globalSubsetSize += terms.getSubsetSize(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 853aa152db036..28ef6f934d287 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -9,12 +9,18 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.LongAdder; public class AggregationUsageService implements ReportingService { + private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total"; + // Attribute names for the metric + private final String AGGREGATION_NAME_KEY = "aggregation_name"; + private final String VALUES_SOURCE_KEY = "values_source"; + private final LongCounter aggregationsUsageCounter; private final Map> aggs; private final AggregationInfo info; @@ -22,9 +28,16 @@ public class AggregationUsageService implements ReportingService> aggs; + private final MeterRegistry meterRegistry; public Builder() { + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { aggs = new HashMap<>(); + assert meterRegistry != null; + this.meterRegistry = meterRegistry; } public void registerAggregationUsage(String aggregationName) { @@ -45,9 +58,16 @@ public AggregationUsageService build() { } } + private AggregationUsageService(Builder builder) { this.aggs = builder.aggs; info = new AggregationInfo(aggs); + this.aggregationsUsageCounter = builder.meterRegistry.registerLongCounter( + ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT, + "Aggregations usage", + "count" + ); } public void incAggregationUsage(String aggregationName, String valuesSourceType) { @@ -61,6 +81,8 @@ public void incAggregationUsage(String aggregationName, String valuesSourceType) assert adder != null : "Unknown subtype [" + aggregationName + "][" + valuesSourceType + "]"; } assert valuesSourceMap != null : "Unknown aggregation [" + aggregationName + "][" + valuesSourceType + "]"; + // tests will have a no-op implementation here + aggregationsUsageCounter.incrementBy(1, Map.of(AGGREGATION_NAME_KEY, aggregationName, VALUES_SOURCE_KEY, valuesSourceType)); public Map getUsageStats() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java index 44e66d98f0258..fcfcad96d9fbf 100644 ---
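The counter registered above uses the telemetry APIs that appear in this diff (`MeterRegistry.registerLongCounter` and `LongCounter.incrementBy` with an attribute map). A minimal sketch of the same wiring; `MeterRegistry.NOOP` keeps it runnable without a telemetry plugin installed:

```java
import java.util.Map;

import org.elasticsearch.telemetry.metric.LongCounter;
import org.elasticsearch.telemetry.metric.MeterRegistry;

// Registers the usage counter once, then increments it with per-usage attributes.
final class AggUsageMetricSketch {
    private final LongCounter counter;

    AggUsageMetricSketch(MeterRegistry registry) {
        // name/description/unit mirror the values registered in the diff
        this.counter = registry.registerLongCounter("es.search.query.aggregations.total", "Aggregations usage", "count");
    }

    void onAggregationUsed(String aggName, String valuesSource) {
        counter.incrementBy(1, Map.of("aggregation_name", aggName, "values_source", valuesSource));
    }

    public static void main(String[] args) {
        new AggUsageMetricSketch(MeterRegistry.NOOP).onAggregationUsed("avg", "numeric");
    }
}
```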
a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java @@ -10,6 +10,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.AbstractMap; import java.util.ArrayList; @@ -58,7 +59,11 @@ public static class Builder { private final Map, List>> aggregatorRegistry = new HashMap<>(); public Builder() { - this.usageServiceBuilder = new AggregationUsageService.Builder(); + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { + this.usageServiceBuilder = new AggregationUsageService.Builder(meterRegistry); } /** diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 4b5c647da0c9a..0c54e8ff89589 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -167,23 +167,35 @@ protected SearchHit nextDoc(int doc) throws IOException { leafSourceLoader, leafIdLoader ); - sourceProvider.source = hit.source(); - fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); - for (FetchSubPhaseProcessor processor : processors) { - processor.process(hit); + boolean success = false; + try { + sourceProvider.source = hit.source(); + fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); + for (FetchSubPhaseProcessor processor : processors) { + processor.process(hit); + } + success = true; + return hit.hit(); + } finally { + if (success == false) { + hit.hit().decRef(); + } } - return hit.hit(); } }; SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad); if (context.isCancelled()) { + for (SearchHit hit : hits) { + // release all hits that would otherwise become owned and eventually released by SearchHits below + hit.decRef(); + } throw new TaskCancelledException("cancelled"); } TotalHits totalHits = context.getTotalHits(); - return SearchHits.unpooled(hits, totalHits, context.getMaxScore()); + return new SearchHits(hits, totalHits, context.getMaxScore()); } List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) { @@ -257,12 +269,12 @@ private static HitContext prepareNonNestedHitContext( String id = idLoader.getId(subDocId); if (id == null) { - // TODO: can we use pooled buffers here as well? - SearchHit hit = SearchHit.unpooled(docId, null); + SearchHit hit = new SearchHit(docId); + // TODO: can we use real pooled buffers here as well? 
Source source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId)); return new HitContext(hit, subReaderContext, subDocId, Map.of(), source); } else { - SearchHit hit = SearchHit.unpooled(docId, id); + SearchHit hit = new SearchHit(docId, id); Source source; if (requiresSource) { Timer timer = profiler.startLoadingSource(); @@ -339,7 +351,7 @@ private static HitContext prepareNestedHitContext( assert nestedIdentity != null; Source nestedSource = nestedIdentity.extractSource(rootSource); - SearchHit hit = SearchHit.unpooled(topDocId, rootId, nestedIdentity); + SearchHit hit = new SearchHit(topDocId, rootId, nestedIdentity); return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index cc39113f2009f..81b3e7465feee 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -67,6 +67,7 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde setNextReader(ctx, docsInLeaf); } currentDoc = docs[i].docId; + assert searchHits[docs[i].index] == null; searchHits[docs[i].index] = nextDoc(docs[i].docId); } } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 4c3d3948ff889..4170f7e2f8b4b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -61,8 +61,13 @@ public FetchSearchResult fetchResult() { public void shardResult(SearchHits hits, ProfileResult profileResult) { assert assertNoSearchTarget(hits); + assert hasReferences(); + var existing = this.hits; + if (existing != null) { + existing.decRef(); + } this.hits = hits; - hits.incRef(); + hits.mustIncRef(); assert this.profileResult == null; this.profileResult = profileResult; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 3b8e4e69d9318..68e46186e4505 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -28,7 +28,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { } assert fetchSourceContext.fetchSource(); SourceFilter sourceFilter = fetchSourceContext.filter(); - + final boolean filterExcludesAll = sourceFilter.excludesAll(); return new FetchSubPhaseProcessor() { private int fastPath; @@ -67,8 +67,13 @@ private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitCon return; } - // Otherwise, filter the source and add it to the hit. - source = source.filter(sourceFilter); + if (filterExcludesAll) { + // we can just add an empty map + source = Source.empty(source.sourceContentType()); + } else { + // Otherwise, filter the source and add it to the hit. 
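The fetch-phase changes above all enforce one ref-counting discipline: once hits come from a pool, every exception path (a failing sub-phase processor, a cancelled task) must release them. A self-contained sketch of the success-flag pattern used in `nextDoc`, with a stand-in `RefCounted` interface:

```java
// Stand-in for org.elasticsearch.core.RefCounted, trimmed to two methods.
interface RefCounted {
    void incRef();
    void decRef();
}

final class FetchSketch {
    // Runs processors over a pooled hit; the hit must not leak if they throw.
    static <T extends RefCounted> T processOrRelease(T hit, Runnable processors) {
        boolean success = false;
        try {
            processors.run(); // may throw
            success = true;
            return hit;
        } finally {
            if (success == false) {
                hit.decRef(); // give the pooled buffers back on failure
            }
        }
    }
}
```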
+ source = source.filter(sourceFilter); + } if (nestedHit) { source = extractNested(source, hitContext.hit().getNestedIdentity()); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index ccb54801472a6..a4ba982e1dd73 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -104,6 +104,7 @@ private void hitExecute(Map innerHi } } var h = fetchResult.hits(); + assert hit.isPooled() || h.isPooled() == false; results.put(entry.getKey(), h); h.mustIncRef(); } diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 232c12e944a96..35f96ee2dc102 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -351,6 +351,7 @@ public Query rewrittenQuery() { * Adds a releasable that will be freed when this context is closed. */ public void addReleasable(Releasable releasable) { // TODO most Releasables are managed by their callers. We probably don't need this. + assert closed.get() == false; releasables.add(releasable); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java index 3bf32159c1676..ceffb32c08b48 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java @@ -109,4 +109,8 @@ private Function buildBytesFilter() { } }; } + + public boolean excludesAll() { + return Arrays.asList(excludes).contains("*"); + } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java index 60b0d259961da..1f05b215699b1 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -128,6 +128,6 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.NESTED_KNN_MORE_INNER_HITS; + return TransportVersions.V_8_13_0; } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java index 6de6338b604ef..65f8c60297ad8 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java @@ -63,7 +63,7 @@ public KnnScoreDocQueryBuilder(ScoreDoc[] scoreDocs, String fieldName, VectorDat public KnnScoreDocQueryBuilder(StreamInput in) throws IOException { super(in); this.scoreDocs = in.readArray(Lucene::readScoreDoc, ScoreDoc[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.NESTED_KNN_MORE_INNER_HITS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.fieldName = in.readOptionalString(); if (in.readBoolean()) { if (in.getTransportVersion().onOrAfter(TransportVersions.KNN_EXPLICIT_BYTE_QUERY_VECTOR_PARSING)) { @@ -100,7 +100,7 @@ 
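`SourceFilter.excludesAll()` above enables a fast path: when the excludes pattern contains `"*"`, the filtered source is empty by definition, so the filtering walk can be skipped entirely. A sketch with the source modeled as a plain map:

```java
import java.util.Arrays;
import java.util.Map;

// Short-circuits filtering when the excludes pattern drops everything.
final class SourceFilterSketch {
    private final String[] excludes;

    SourceFilterSketch(String[] excludes) {
        this.excludes = excludes;
    }

    boolean excludesAll() {
        return Arrays.asList(excludes).contains("*");
    }

    Map<String, Object> apply(Map<String, Object> source) {
        if (excludesAll()) {
            return Map.of(); // fast path: no need to walk the source at all
        }
        // ... full include/exclude filtering would run here ...
        return source;
    }
}
```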
VectorData queryVector() { @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeArray(Lucene::writeScoreDoc, scoreDocs); - if (out.getTransportVersion().onOrAfter(TransportVersions.NESTED_KNN_MORE_INNER_HITS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(fieldName); if (queryVector != null) { out.writeBoolean(true); diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index aa5daa532cf42..0c8dfc9a98330 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -167,7 +167,7 @@ private KnnVectorQueryBuilder( public KnnVectorQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.numCands = in.readOptionalVInt(); } else { this.numCands = in.readVInt(); @@ -245,7 +245,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { throw new IllegalStateException("missing a rewriteAndFetch?"); } out.writeString(fieldName); - if (out.getTransportVersion().onOrAfter(TransportVersions.KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalVInt(numCands); } else { if (numCands == null) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 5cabe22389529..fa6ea9c6519d8 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -704,7 +704,10 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad .stream() .map(i -> metadata.get(renameIndex(i.getName(), request, true)).getIndex()) .toList(); - return dataStream.copy().setName(dataStreamName).setIndices(updatedIndices).build(); + return dataStream.copy() + .setName(dataStreamName) + .setBackingIndices(dataStream.getBackingIndices().copy().setIndices(updatedIndices).build()) + .build(); } public static RestoreInProgress updateRestoreStateWithDeletedIndices(RestoreInProgress oldRestore, Set deletedIndices) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index d505a6ded4809..dd8ddcffd5fe3 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -4126,7 +4126,7 @@ static ClusterState executeBatch( } private static boolean supportsNodeRemovalTracking(ClusterState clusterState) { - return clusterState.getMinTransportVersion().onOrAfter(TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED); + return clusterState.getMinTransportVersion().onOrAfter(TransportVersions.V_8_13_0); } private final MasterServiceTaskQueue updateNodeIdsToRemoveQueue; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index e6f0da6a45452..526f327b91c19 100644 --- 
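The kNN query builders above collapse named transport-version constants to the released `V_8_13_0` without changing the wire format. A stand-in sketch of version-gated serialization using plain `DataInput`/`DataOutput` streams and the 8.13 transport version id from the CSV below (`8595000`); the real code compares `TransportVersion` objects and uses `readOptionalVInt`/`writeOptionalVInt`:

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Branches on the negotiated wire version so old and new nodes interoperate.
final class VersionGatedIo {
    static final int V_8_13_0 = 8_595_000;

    static void writeNumCands(DataOutputStream out, int wireVersion, Integer numCands) throws IOException {
        if (wireVersion >= V_8_13_0) {
            out.writeBoolean(numCands != null); // nullable on new versions
            if (numCands != null) {
                out.writeInt(numCands);
            }
        } else {
            out.writeInt(numCands); // pre-8.13 wire format requires a non-null value
        }
    }

    static Integer readNumCands(DataInputStream in, int wireVersion) throws IOException {
        if (wireVersion >= V_8_13_0) {
            return in.readBoolean() ? in.readInt() : null;
        }
        return in.readInt();
    }
}
```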
a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -119,3 +119,4 @@ 8.13.1,8595000 8.13.2,8595000 8.13.3,8595000 +8.13.4,8595001 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index bc6523c98761c..39f2a701726af 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -119,3 +119,4 @@ 8.13.1,8503000 8.13.2,8503000 8.13.3,8503000 +8.13.4,8503000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index 6098ea777d38a..7ccdb5da6d736 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.reroute; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; @@ -22,6 +21,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.admin.cluster.RestClusterRerouteAction; import org.elasticsearch.test.ESTestCase; @@ -202,7 +202,7 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep if (original.isRetryFailed() || randomBoolean()) { params.put("retry_failed", Boolean.toString(original.isRetryFailed())); } - if (false == original.masterNodeTimeout().equals(MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT) || randomBoolean()) { + if (false == original.masterNodeTimeout().equals(TimeValue.THIRTY_SECONDS) || randomBoolean()) { params.put(REST_MASTER_TIMEOUT_PARAM, original.masterNodeTimeout().toString()); } if (original.getCommands() != null) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java index 2a64fbad97575..d76bfc03e1d7f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java @@ -67,8 +67,8 @@ public void testCCSCompatibilityCheck() { @Override public void writeTo(StreamOutput out) throws IOException { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); @@ -99,7 
+99,7 @@ public void writeTo(StreamOutput out) throws IOException { assertThat(ex.getMessage(), containsString("not compatible with version")); assertThat(ex.getMessage(), containsString("and the 'search.check_ccs_compatibility' setting is enabled.")); - assertThat(ex.getCause().getMessage(), containsString("ResolveClusterAction requires at least Transport Version")); + assertThat(ex.getCause().getMessage(), containsString("ResolveClusterAction requires at least version")); } finally { assertTrue(ESTestCase.terminate(threadPool)); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 149752578e1ea..c2edf9729b8b8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -682,7 +682,9 @@ public void testRolloverClusterStateForDataStreamFailureStore() throws Exception Metadata.Builder builder = Metadata.builder(); builder.put("template", template); dataStream.getIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); - dataStream.getFailureIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + dataStream.getFailureIndices() + .getIndices() + .forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); builder.put(dataStream); final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); @@ -723,15 +725,18 @@ public void testRolloverClusterStateForDataStreamFailureStore() throws Exception assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); assertEquals(newIndexName, rolloverResult.rolloverIndexName()); Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); - assertEquals(dataStream.getIndices().size() + dataStream.getFailureIndices().size() + 1, rolloverMetadata.indices().size()); + assertEquals( + dataStream.getIndices().size() + dataStream.getFailureIndices().getIndices().size() + 1, + rolloverMetadata.indices().size() + ); IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName); var ds = (DataStream) rolloverMetadata.getIndicesLookup().get(dataStream.getName()); assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM)); assertThat(ds.getIndices(), hasSize(dataStream.getIndices().size())); - assertThat(ds.getFailureIndices(), hasSize(dataStream.getFailureIndices().size() + 1)); - assertThat(ds.getFailureIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); - assertThat(ds.getFailureIndices(), hasItem(rolloverIndexMetadata.getIndex())); + assertThat(ds.getFailureIndices().getIndices(), hasSize(dataStream.getFailureIndices().getIndices().size() + 1)); + assertThat(ds.getFailureIndices().getIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); + assertThat(ds.getFailureIndices().getIndices(), hasItem(rolloverIndexMetadata.getIndex())); assertThat(ds.getFailureStoreWriteIndex(), equalTo(rolloverIndexMetadata.getIndex())); RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName()); diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 42c4dec3e219b..9dbabe2c41893 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -440,12 +440,13 @@ public void testLazyRollover() throws Exception { doAnswer(invocation -> { Object[] args = invocation.getArguments(); - assert args.length == 5; + assert args.length == 6; @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) args[4]; + ActionListener listener = (ActionListener) args[5]; listener.onResponse(AcknowledgedResponse.TRUE); return null; - }).when(mockMetadataDataStreamService).setRolloverOnWrite(eq(dataStream.getName()), eq(true), any(), any(), anyActionListener()); + }).when(mockMetadataDataStreamService) + .setRolloverOnWrite(eq(dataStream.getName()), eq(true), eq(false), any(), any(), anyActionListener()); final TransportRolloverAction transportRolloverAction = new TransportRolloverAction( mock(TransportService.class), diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 9803082bbd88a..8bc2a978af0cf 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -768,10 +768,10 @@ private DataStream createDataStream( builder.put(indexMetadata, false); backingIndices.add(indexMetadata.getIndex()); } - return DataStream.builder(dataStreamName, backingIndices) - .setGeneration(backingIndicesCount) - .setAutoShardingEvent(autoShardingEvent) - .build(); + return DataStream.builder( + dataStreamName, + DataStream.DataStreamIndices.backingIndicesBuilder(backingIndices).setAutoShardingEvent(autoShardingEvent).build() + ).setGeneration(backingIndicesCount).build(); } private IndexMetadata createIndexMetadata( diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index b873bec2bd427..6d24f8d2fe9e0 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.action.support.master; +import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; @@ -36,6 +37,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -58,6 +60,8 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import 
org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -149,7 +153,9 @@ public static class Request extends MasterNodeRequest implements Indice private String[] indices = Strings.EMPTY_ARRAY; private final RefCounted refCounted = AbstractRefCounted.of(() -> {}); - Request() {} + Request() { + super(TimeValue.THIRTY_SECONDS); + } Request(StreamInput in) throws IOException { super(in); @@ -475,6 +481,7 @@ public void testMasterNotAvailable() throws ExecutionException, InterruptedExcep assertFalse(request.hasReferences()); } + @TestLogging(reason = "testing TRACE logging", value = "org.elasticsearch.cluster.service:TRACE") public void testMasterBecomesAvailable() throws ExecutionException, InterruptedException { Request request = new Request(); if (randomBoolean()) { @@ -482,11 +489,24 @@ public void testMasterBecomesAvailable() throws ExecutionException, InterruptedE } setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); PlainActionFuture listener = new PlainActionFuture<>(); - ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), null, request, listener); + final var task = new Task(randomNonNegativeLong(), "test", "internal:testAction", "", TaskId.EMPTY_TASK_ID, Map.of()); + ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), task, request, listener); assertFalse(listener.isDone()); request.decRef(); assertTrue(request.hasReferences()); - setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + + MockLogAppender.assertThatLogger( + () -> setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)), + ClusterApplierService.class, + new MockLogAppender.SeenEventExpectation( + "listener log", + ClusterApplierService.class.getCanonicalName(), + Level.TRACE, + "calling [ClusterStateObserver[ObservingContext[ContextPreservingListener[listener for [execution of [" + + task + + "]] retrying after cluster state version [*]]]]] with change to version [*]" + ) + ); assertTrue(listener.isDone()); assertFalse(request.hasReferences()); listener.get(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 6df9260b2bccf..79203899b665d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -60,6 +60,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -295,17 +296,9 @@ protected void onSendRequest(long requestId, String action, TransportRequest req assertSame(node, joiningNode); assertEquals(JoinValidationService.JOIN_VALIDATE_ACTION_NAME, action); - final var listener = new ActionListener() { - @Override - public void onResponse(TransportResponse transportResponse) { - fail("should not succeed"); - } - - @Override - public void 
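The `testMasterBecomesAvailable` change above asserts on a TRACE message with `MockLogAppender.assertThatLogger`, where `*` acts as a wildcard in the expected message. A condensed sketch of the same assertion style; the logger and message here are illustrative, and, as in the test above, TRACE capture requires the level to be enabled (for example via `@TestLogging`):

```java
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import org.elasticsearch.test.MockLogAppender;

// Runs an action and asserts that a specific logger emitted a matching event.
final class LogAssertionSketch {
    private static final Logger logger = LogManager.getLogger(LogAssertionSketch.class);

    static void assertEmitsTrace() {
        MockLogAppender.assertThatLogger(
            () -> logger.trace("retrying after cluster state version [42]"),
            LogAssertionSketch.class,
            new MockLogAppender.SeenEventExpectation(
                "retry log",                                 // name shown on failure
                LogAssertionSketch.class.getCanonicalName(), // logger to watch
                Level.TRACE,
                "retrying after cluster state version [*]"   // '*' is a wildcard
            )
        );
    }
}
```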
onFailure(Exception e) { - handleError(requestId, new RemoteTransportException(node.getName(), node.getAddress(), action, e)); - } - }; + final ActionListener listener = assertNoSuccessListener( + e -> handleError(requestId, new RemoteTransportException(node.getName(), node.getAddress(), action, e)) + ); try (var ignored = NamedWriteableRegistryTests.ignoringUnknownNamedWriteables(); var out = new BytesStreamOutput()) { request.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index d42b6096b6e32..87fe732d156c5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -73,7 +73,7 @@ protected DataStream doParseInstance(XContentParser parser) throws IOException { @Override protected Writeable.Reader instanceReader() { - return DataStream::new; + return DataStream::read; } @Override @@ -94,10 +94,12 @@ protected DataStream mutateInstance(DataStream instance) { var indexMode = instance.getIndexMode(); var lifecycle = instance.getLifecycle(); var failureStore = instance.isFailureStoreEnabled(); - var failureIndices = instance.getFailureIndices(); + var failureIndices = instance.getFailureIndices().getIndices(); var rolloverOnWrite = instance.rolloverOnWrite(); var autoShardingEvent = instance.getAutoShardingEvent(); - switch (between(0, 12)) { + var failureRolloverOnWrite = instance.getFailureIndices().isRolloverOnWrite(); + var failureAutoShardingEvent = instance.getBackingIndices().getAutoShardingEvent(); + switch (between(0, 14)) { case 0 -> name = randomAlphaOfLength(10); case 1 -> indices = randomNonEmptyIndexInstances(); case 2 -> generation = instance.getGeneration() + randomIntBetween(1, 10); @@ -114,6 +116,7 @@ protected DataStream mutateInstance(DataStream instance) { isReplicated = isReplicated == false; // Replicated data streams cannot be marked for lazy rollover. rolloverOnWrite = isReplicated == false && rolloverOnWrite; + failureRolloverOnWrite = isReplicated == false && failureRolloverOnWrite; } case 6 -> { if (isSystem == false) { @@ -139,7 +142,27 @@ protected DataStream mutateInstance(DataStream instance) { isReplicated = rolloverOnWrite == false && isReplicated; } case 12 -> { - autoShardingEvent = randomBoolean() && autoShardingEvent != null + if (randomBoolean() || autoShardingEvent == null) { + // If we're mutating the auto sharding event of the failure store, we need to ensure there's at least one failure index. + if (failureIndices.isEmpty()) { + failureIndices = DataStreamTestHelper.randomIndexInstances(); + failureStore = true; + } + autoShardingEvent = new DataStreamAutoShardingEvent( + failureIndices.get(failureIndices.size() - 1).getName(), + randomIntBetween(1, 10), + randomMillisUpToYear9999() + ); + } else { + autoShardingEvent = null; + } + } + case 13 -> { + failureRolloverOnWrite = failureRolloverOnWrite == false; + isReplicated = failureRolloverOnWrite == false && isReplicated; + } + case 14 -> { + failureAutoShardingEvent = randomBoolean() && failureAutoShardingEvent != null ? 
null : new DataStreamAutoShardingEvent( indices.get(indices.size() - 1).getName(), @@ -151,25 +174,29 @@ protected DataStream mutateInstance(DataStream instance) { return new DataStream( name, - indices, generation, metadata, isHidden, isReplicated, isSystem, + System::currentTimeMillis, allowsCustomRouting, indexMode, lifecycle, failureStore, - failureIndices, - rolloverOnWrite, - autoShardingEvent + new DataStream.DataStreamIndices(DataStream.BACKING_INDEX_PREFIX, indices, rolloverOnWrite, autoShardingEvent), + new DataStream.DataStreamIndices( + DataStream.BACKING_INDEX_PREFIX, + failureIndices, + failureRolloverOnWrite, + failureAutoShardingEvent + ) ); } public void testRollover() { DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); - Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); @@ -196,7 +223,7 @@ public void testRolloverWithConflictingBackingIndexName() { builder.put(im, false); } - final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build()); + final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build(), ds.getBackingIndices()); final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + numConflictingIndices + 1)); @@ -212,7 +239,7 @@ public void testRolloverUpgradeToTsdbDataStream() { .setReplicated(false) .setIndexMode(randomBoolean() ? 
IndexMode.STANDARD : null) .build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), true, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -225,7 +252,7 @@ public void testRolloverUpgradeToTsdbDataStream() { public void testRolloverDowngradeToRegularDataStream() { DataStream ds = DataStreamTestHelper.randomInstance().copy().setReplicated(false).setIndexMode(IndexMode.TIME_SERIES).build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -238,18 +265,18 @@ public void testRolloverDowngradeToRegularDataStream() { public void testRolloverFailureStore() { DataStream ds = DataStreamTestHelper.randomInstance(true).promoteDataStream(); - Tuple newCoordinates = ds.nextFailureStoreWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getFailureIndices()); final DataStream rolledDs = ds.rolloverFailureStore(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); // Ensure that the rolloverOnWrite flag hasn't changed when rolling over a failure store. 
assertThat(rolledDs.rolloverOnWrite(), equalTo(ds.rolloverOnWrite())); - assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); + assertThat(rolledDs.getFailureIndices().getIndices().size(), equalTo(ds.getFailureIndices().getIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); - assertTrue(rolledDs.getFailureIndices().containsAll(ds.getFailureIndices())); - assertTrue(rolledDs.getFailureIndices().contains(rolledDs.getFailureStoreWriteIndex())); + assertTrue(rolledDs.getFailureIndices().getIndices().containsAll(ds.getFailureIndices().getIndices())); + assertTrue(rolledDs.getFailureIndices().getIndices().contains(rolledDs.getFailureStoreWriteIndex())); } public void testRemoveBackingIndex() { @@ -298,15 +325,18 @@ public void testRemoveBackingWriteIndex() { public void testRemoveFailureStoreIndex() { DataStream original = createRandomDataStream(); - int indexToRemove = randomIntBetween(1, original.getFailureIndices().size() - 1); + int indexToRemove = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1); - DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().get(indexToRemove - 1)); + DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().getIndices().get(indexToRemove - 1)); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() - 1)); - for (int k = 0; k < (original.getFailureIndices().size() - 1); k++) { - assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k < (indexToRemove - 1) ? k : k + 1))); + assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size() - 1)); + for (int k = 0; k < (original.getFailureIndices().getIndices().size() - 1); k++) { + assertThat( + updated.getFailureIndices().getIndices().get(k), + equalTo(original.getFailureIndices().getIndices().get(k < (indexToRemove - 1) ? 
k : k + 1)) + ); } } @@ -326,7 +356,9 @@ public void testRemoveFailureStoreWriteIndex() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> original.removeFailureStoreIndex(original.getFailureIndices().get(original.getFailureIndices().size() - 1)) + () -> original.removeFailureStoreIndex( + original.getFailureIndices().getIndices().get(original.getFailureIndices().getIndices().size() - 1) + ) ); assertThat( e.getMessage(), @@ -334,7 +366,7 @@ public void testRemoveFailureStoreWriteIndex() { String.format( Locale.ROOT, "cannot remove backing index [%s] of data stream [%s] because it is the write index", - original.getFailureIndices().get(original.getFailureIndices().size() - 1).getName(), + original.getFailureIndices().getIndices().get(original.getFailureIndices().getIndices().size() - 1).getName(), original.getName() ) ) @@ -379,9 +411,9 @@ public void testAddBackingIndexThatIsPartOfAnotherDataStream() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); Index indexToAdd = randomFrom(ds2.getIndices().toArray(Index.EMPTY_ARRAY)); @@ -409,11 +441,11 @@ public void testAddBackingIndexThatIsPartOfDataStreamFailureStore() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); - Index indexToAdd = randomFrom(ds2.getFailureIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(ds2.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ds1.addBackingIndex(builder.build(), indexToAdd)); assertThat( @@ -498,7 +530,7 @@ public void testAddFailureStoreIndex() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices()); + createMetadataForIndices(builder, original.getFailureIndices().getIndices()); Index indexToAdd = new Index(randomAlphaOfLength(4), UUIDs.randomBase64UUID(random())); builder.put( @@ -514,11 +546,11 @@ public void testAddFailureStoreIndex() { assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() + 1)); - for (int k = 1; k <= original.getFailureIndices().size(); k++) { - assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k - 1))); + assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size() + 1)); + for (int k = 1; k <= original.getFailureIndices().getIndices().size(); k++) { + assertThat(updated.getFailureIndices().getIndices().get(k), equalTo(original.getFailureIndices().getIndices().get(k - 1))); } - 
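(Aside on the call-site churn above: backing and failure indices are now each held in a DataStream.DataStreamIndices wrapper that also carries that index set's rollover-on-write flag and auto-sharding event, and rollover-coordinate computation is parameterized by the set being rolled. A sketch of the resulting shapes, assuming the same package-level access the tests enjoy; `ds` stands in for any DataStream.)

```java
import java.util.List;

import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.Index;

class DataStreamIndicesSketch {
    static DataStream rollFailureStore(DataStream ds) {
        // Accessors now go through the DataStreamIndices holder.
        List<Index> failureIndices = ds.getFailureIndices().getIndices();
        boolean failureRollover = ds.getFailureIndices().isRolloverOnWrite(); // flag moves with the wrapper

        // Rollover coordinates are computed per index set, so one method serves
        // both the backing indices and the failure store.
        Tuple<String, Long> next = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getFailureIndices());
        return ds.rolloverFailureStore(new Index(next.v1(), UUIDs.randomBase64UUID()), next.v2());
    }
}
```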
assertThat(updated.getFailureIndices().get(0), equalTo(indexToAdd)); + assertThat(updated.getFailureIndices().getIndices().get(0), equalTo(indexToAdd)); } public void testAddFailureStoreIndexThatIsPartOfAnotherDataStream() { @@ -530,11 +562,11 @@ public void testAddFailureStoreIndexThatIsPartOfAnotherDataStream() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); - Index indexToAdd = randomFrom(ds2.getFailureIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(ds2.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -563,9 +595,9 @@ public void testAddFailureStoreIndexThatIsPartOfDataStreamBackingIndices() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); Index indexToAdd = randomFrom(ds2.getIndices().toArray(Index.EMPTY_ARRAY)); @@ -594,16 +626,16 @@ public void testAddExistingFailureStoreIndex() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices()); + createMetadataForIndices(builder, original.getFailureIndices().getIndices()); - Index indexToAdd = randomFrom(original.getFailureIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(original.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); DataStream updated = original.addFailureStoreIndex(builder.build(), indexToAdd); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration())); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size())); - assertThat(updated.getFailureIndices(), equalTo(original.getFailureIndices())); + assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size())); + assertThat(updated.getFailureIndices().getIndices(), equalTo(original.getFailureIndices().getIndices())); } public void testAddFailureStoreIndexWithAliases() { @@ -613,7 +645,7 @@ public void testAddFailureStoreIndexWithAliases() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices()); + createMetadataForIndices(builder, original.getFailureIndices().getIndices()); Index indexToAdd = new Index(randomAlphaOfLength(4), UUIDs.randomBase64UUID(random())); IndexMetadata.Builder b = IndexMetadata.builder(indexToAdd.getName()) @@ -743,11 +775,16 @@ public void testSnapshot() { var replicated = preSnapshotDataStream.isReplicated() && randomBoolean(); var postSnapshotDataStream = preSnapshotDataStream.copy() - .setIndices(postSnapshotIndices) + .setBackingIndices( + preSnapshotDataStream.getBackingIndices() + .copy() + 
.setIndices(postSnapshotIndices) + .setRolloverOnWrite(replicated == false && preSnapshotDataStream.rolloverOnWrite()) + .build() + ) .setGeneration(preSnapshotDataStream.getGeneration() + randomIntBetween(0, 5)) .setMetadata(preSnapshotDataStream.getMetadata() == null ? null : new HashMap<>(preSnapshotDataStream.getMetadata())) .setReplicated(replicated) - .setRolloverOnWrite(replicated == false && preSnapshotDataStream.rolloverOnWrite()) .build(); var reconciledDataStream = postSnapshotDataStream.snapshot( @@ -775,7 +812,9 @@ public void testSnapshotWithAllBackingIndicesRemoved() { var preSnapshotDataStream = DataStreamTestHelper.randomInstance(); var indicesToAdd = randomNonEmptyIndexInstances(); - var postSnapshotDataStream = preSnapshotDataStream.copy().setIndices(indicesToAdd).build(); + var postSnapshotDataStream = preSnapshotDataStream.copy() + .setBackingIndices(preSnapshotDataStream.getBackingIndices().copy().setIndices(indicesToAdd).build()) + .build(); assertNull(postSnapshotDataStream.snapshot(preSnapshotDataStream.getIndices().stream().map(Index::getName).toList())); } @@ -1769,7 +1808,6 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws isSystem, randomBoolean(), isSystem, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : null, // IndexMode.TIME_SERIES triggers validation that many unit tests doesn't pass lifecycle, @@ -1958,12 +1996,11 @@ public void testWriteFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), false, - null, + List.of(), replicated == false && randomBoolean(), null ); @@ -1977,7 +2014,6 @@ public void testWriteFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), @@ -2003,7 +2039,6 @@ public void testWriteFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), @@ -2028,12 +2063,11 @@ public void testIsFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), false, - null, + List.of(), replicated == false && randomBoolean(), null ); @@ -2051,7 +2085,6 @@ public void testIsFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), @@ -2083,7 +2116,6 @@ public void testIsFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? 
IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 9a560abe20c74..d4639c3d3118e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -357,7 +357,12 @@ public void testRemoveBrokenBackingIndexReference() { var state = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>(dataStreamName, 2)), List.of()); var original = state.getMetadata().dataStreams().get(dataStreamName); var broken = original.copy() - .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) + .setBackingIndices( + original.getBackingIndices() + .copy() + .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) + .build() + ) .build(); var brokenState = ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(broken).build()).build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 50030143ec354..617e1cb09c353 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -80,6 +80,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptySet; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.cluster.service.MasterService.MAX_TASK_DESCRIPTION_CHARS; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -1041,30 +1042,22 @@ public void onFailure(Exception e) { threadContext.putHeader(testContextHeaderName, testContextHeaderValue); final var expectFailure = randomBoolean(); final var taskComplete = new AtomicBoolean(); - final var task = new Task(expectFailure, testResponseHeaderValue, new ActionListener<>() { - @Override - public void onResponse(ClusterState clusterState) { - throw new AssertionError("should not succeed"); + final var task = new Task(expectFailure, testResponseHeaderValue, assertNoSuccessListener(e -> { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertEquals(List.of(testResponseHeaderValue), threadContext.getResponseHeaders().get(testResponseHeaderName)); + assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); + assertThat(e.getMessage(), equalTo(publicationFailedExceptionMessage)); + if (expectFailure) { + assertThat(e.getSuppressed().length, greaterThan(0)); + var suppressed = e.getSuppressed()[0]; + assertThat(suppressed, instanceOf(ElasticsearchException.class)); + assertThat(suppressed.getMessage(), equalTo(taskFailedExceptionMessage)); } - - @Override - public void onFailure(Exception e) { - assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); - assertEquals(List.of(testResponseHeaderValue), threadContext.getResponseHeaders().get(testResponseHeaderName)); - assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); - assertThat(e.getMessage(), 
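(Aside: the anonymous "must not succeed" listeners being deleted here and in JoinValidationServiceTests are folded into the new ActionTestUtils.assertNoSuccessListener helper, whose definition appears near the end of this diff. A small sketch of its contract:)

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionTestUtils;

class NoSuccessSketch {
    static void demo() {
        Exception expected = new IllegalStateException("boom");
        // Fails the test if onResponse is ever invoked; onFailure hands the
        // exception to the supplied consumer (re-failing if the consumer throws).
        ActionListener<Void> listener = ActionTestUtils.assertNoSuccessListener(e -> {
            if (e != expected) {
                throw new AssertionError("handler should see the original exception");
            }
        });
        listener.onFailure(expected); // consumer runs; the test proceeds
        // listener.onResponse(null) would instead trip the fail(...) inside the helper
    }
}
```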
equalTo(publicationFailedExceptionMessage)); - if (expectFailure) { - assertThat(e.getSuppressed().length, greaterThan(0)); - var suppressed = e.getSuppressed()[0]; - assertThat(suppressed, instanceOf(ElasticsearchException.class)); - assertThat(suppressed.getMessage(), equalTo(taskFailedExceptionMessage)); - } - assertNotNull(publishedState.get()); - assertNotSame(stateBeforeFailure, publishedState.get()); - assertTrue(taskComplete.compareAndSet(false, true)); - publishFailureCountdown.countDown(); - } - }); + assertNotNull(publishedState.get()); + assertNotSame(stateBeforeFailure, publishedState.get()); + assertTrue(taskComplete.compareAndSet(false, true)); + publishFailureCountdown.countDown(); + })); queue.submitTask("test", task, null); } diff --git a/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java new file mode 100644 index 0000000000000..f92097f53bb81 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.file; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.node.NodeRoleSettings; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MasterNodeFileWatchingServiceTests extends ESTestCase { + + static final DiscoveryNode localNode = DiscoveryNodeUtils.create("local-node"); + MasterNodeFileWatchingService testService; + Path watchedFile; + Runnable fileChangedCallback; + + @Before + public void setupTestService() throws IOException { + watchedFile = createTempFile(); + ClusterService clusterService = mock(ClusterService.class); + Settings settings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.MASTER_ROLE.roleName()) + .build(); + when(clusterService.getSettings()).thenReturn(settings); + fileChangedCallback = () -> {}; + testService = new MasterNodeFileWatchingService(clusterService, watchedFile) { + + @Override + protected void processFileChanges() throws InterruptedException, ExecutionException, IOException { + fileChangedCallback.run(); + } + + @Override + protected void processInitialFileMissing() throws InterruptedException, ExecutionException, IOException { + // file always exists, but we don't 
care about the missing case for master node behavior + } + }; + testService.start(); + } + + @After + public void stopTestService() { + testService.stop(); + } + + public void testBecomingMasterNodeStartsWatcher() { + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", notRecoveredClusterState, ClusterState.EMPTY_STATE)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(false)); + + ClusterState recoveredClusterState = ClusterState.builder(notRecoveredClusterState) + .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", recoveredClusterState, notRecoveredClusterState)); + // once the cluster state is recovered on the master node, the watcher should start + assertThat(testService.watching(), is(true)); + } + + public void testChangingMasterStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + final DiscoveryNode anotherNode = DiscoveryNodeUtils.create("another-node"); + ClusterState differentMasterClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes( + DiscoveryNodes.builder().add(localNode).add(anotherNode).localNodeId(localNode.getId()).masterNodeId(anotherNode.getId()) + ) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", differentMasterClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } + + public void testBlockingClusterStateStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + ClusterState blockedClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", blockedClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 4885bbc277cb4..8e62a9306a3d4 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -119,7 +119,6 @@ private void assertExpectedLogMessages(Consumer consumer, MockLogAppende Logger testLogger = LogManager.getLogger("org.elasticsearch.test"); MockLogAppender appender = new MockLogAppender(); try (var ignored = appender.capturing("org.elasticsearch.test")) { -
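(Aside on the one-line SettingsFilterTests hunk that follows: the deleted `appender.start()` call is redundant now, evidently because MockLogAppender.capturing(...) wires up the appender for the duration of the try block itself. The resulting pattern, sketched with a hypothetical expectation:)

```java
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.test.MockLogAppender;

class CapturingSketch {
    static void demo() {
        Logger logger = LogManager.getLogger("org.elasticsearch.test");
        MockLogAppender appender = new MockLogAppender();
        // capturing(...) attaches the appender to the named logger and detaches
        // it on close; no explicit appender.start() is needed any more.
        try (var ignored = appender.capturing("org.elasticsearch.test")) {
            appender.addExpectation(
                new MockLogAppender.SeenEventExpectation("demo", "org.elasticsearch.test", Level.INFO, "hello *")
            );
            logger.info("hello world");
            appender.assertAllExpectationsMatched();
        }
    }
}
```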
appender.start(); Arrays.stream(expectations).forEach(appender::addExpectation); consumer.accept(testLogger); appender.assertAllExpectationsMatched(); diff --git a/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java b/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java new file mode 100644 index 0000000000000..bfb03ea9496e5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java @@ -0,0 +1,427 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matcher; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import java.time.format.ResolverStyle; +import java.time.format.SignStyle; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalQueries; +import java.time.temporal.ValueRange; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static java.time.temporal.ChronoField.DAY_OF_MONTH; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.MONTH_OF_YEAR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.YEAR; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class Iso8601ParserTests extends ESTestCase { + + private static Iso8601Parser defaultParser() { + return new Iso8601Parser(Set.of(), true, Map.of()); + } + + private static Matcher hasResult(DateTime dateTime) { + return transformedMatch(Iso8601Parser.Result::result, equalTo(dateTime)); + } + + private static Matcher hasError(int parseError) { + return transformedMatch(Iso8601Parser.Result::errorIndex, equalTo(parseError)); + } + + public void testStrangeParses() { + assertThat(defaultParser().tryParse("-9999-01-01", null), hasResult(new DateTime(-9999, 1, 1, null, null, null, null, null, null))); + assertThat(defaultParser().tryParse("1000", null), hasResult(new DateTime(1000, null, null, null, null, null, null, null, null))); + assertThat(defaultParser().tryParse("2023-02-02T", null), hasResult(new DateTime(2023, 2, 2, null, null, null, null, null, null))); + + // these are accepted by the previous formatters, but are not valid ISO8601 + assertThat(defaultParser().tryParse("2023-01-01T12:00:00.01,02", null), hasError(22)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Europe/Paris+0400", null), hasError(19)); + } + + public void testOutOfRange() { + assertThat(defaultParser().tryParse("2023-13-12", null), hasError(5)); + assertThat(defaultParser().tryParse("2023-12-32", null), hasError(8)); + 
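(For orientation in the new Iso8601ParserTests: tryParse never throws. It returns an Iso8601Parser.Result carrying either the parsed DateTime or the index of the first offending character, which is what the hasResult/hasError matchers above inspect; any fields absent from the input are filled from the defaults map, as testDefaults below exercises. A direct-usage sketch, assuming the same package-level access as the tests:)

```java
import java.time.temporal.ChronoField;
import java.util.Map;
import java.util.Set;

class Iso8601Sketch {
    static void demo() {
        // Year is mandatory here; everything else is optional, with no defaults.
        Iso8601Parser parser = new Iso8601Parser(Set.of(ChronoField.YEAR), true, Map.of());

        Iso8601Parser.Result ok = parser.tryParse("2023-06-20T15:48:09", null);
        DateTime parsed = ok.result();  // non-null on success

        Iso8601Parser.Result bad = parser.tryParse("2023-13-12", null);
        int where = bad.errorIndex();   // 5: the month field is out of range
        assert parsed != null && where == 5;
    }
}
```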
assertThat(defaultParser().tryParse("2023-12-31T24", null), hasError(11)); + assertThat(defaultParser().tryParse("2023-12-31T23:60", null), hasError(14)); + assertThat(defaultParser().tryParse("2023-12-31T23:59:60", null), hasError(17)); + assertThat(defaultParser().tryParse("2023-12-31T23:59:59+18:30", null), hasError(19)); + } + + public void testMandatoryFields() { + assertThat( + new Iso8601Parser(Set.of(YEAR), true, Map.of()).tryParse("2023", null), + hasResult(new DateTime(2023, null, null, null, null, null, null, null, null)) + ); + assertThat(new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR), true, Map.of()).tryParse("2023", null), hasError(4)); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR), true, Map.of()).tryParse("2023-06", null), + hasResult(new DateTime(2023, 6, null, null, null, null, null, null, null)) + ); + assertThat(new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH), true, Map.of()).tryParse("2023-06", null), hasError(7)); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH), true, Map.of()).tryParse("2023-06-20", null), + hasResult(new DateTime(2023, 6, 20, null, null, null, null, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY), false, Map.of()).tryParse("2023-06-20", null), + hasError(10) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY), false, Map.of()).tryParse("2023-06-20T15", null), + hasResult(new DateTime(2023, 6, 20, 15, 0, 0, 0, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15", + null + ), + hasError(13) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15Z", + null + ), + hasError(13) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15:48", + null + ), + hasResult(new DateTime(2023, 6, 20, 15, 48, 0, 0, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48", null), + hasError(16) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48Z", null), + hasError(16) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48:09", null), + hasResult(new DateTime(2023, 6, 20, 15, 48, 9, 0, null, null)) + ); + } + + public void testParseNanos() { + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.5", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,5", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000_000, null, null)) + ); + + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.05", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.0005", null), + hasResult(new 
DateTime(2023, 1, 1, 12, 0, 0, 500_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,00005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,0000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.00000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,000000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5, null, null)) + ); + + // too many nanos + assertThat(defaultParser().tryParse("2023-01-01T12:00:00.0000000005", null), hasError(29)); + } + + private static Matcher hasTimezone(ZoneId offset) { + return transformedMatch(r -> r.result().query(TemporalQueries.zone()), equalTo(offset)); + } + + public void testParseTimezones() { + // using defaults + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", null), hasTimezone(null)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", ZoneOffset.UTC), hasTimezone(ZoneOffset.UTC)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", ZoneOffset.ofHours(-3)), hasTimezone(ZoneOffset.ofHours(-3))); + + // timezone specified + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Z", null), hasTimezone(ZoneOffset.UTC)); + + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-05", null), hasTimezone(ZoneOffset.ofHours(-5))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+11", null), hasTimezone(ZoneOffset.ofHours(11))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+0830", null), hasTimezone(ZoneOffset.ofHoursMinutes(8, 30))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-0415", null), hasTimezone(ZoneOffset.ofHoursMinutes(-4, -15))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+08:30", null), hasTimezone(ZoneOffset.ofHoursMinutes(8, 30))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-04:15", null), hasTimezone(ZoneOffset.ofHoursMinutes(-4, -15))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+011030", null), hasTimezone(ZoneOffset.ofHoursMinutesSeconds(1, 10, 30))); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00-074520", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(-7, -45, -20)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00+01:10:30", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(1, 10, 30)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00-07:45:20", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(-7, -45, -20)) + ); + + assertThat(defaultParser().tryParse("2023-01-01T12:00:00GMT", null), hasTimezone(ZoneId.of("GMT"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UTC", null), hasTimezone(ZoneId.of("UTC"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UT", null), hasTimezone(ZoneId.of("UT"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00GMT+3", null), hasTimezone(ZoneId.of("GMT+3"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UTC-4", null), hasTimezone(ZoneId.of("UTC-4"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UT+6", null), hasTimezone(ZoneId.of("UT+6"))); + 
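(Aside on the zone assertions around this point: numeric offsets and region IDs both parse, and the zone is recovered from the parsed result through the standard TemporalAccessor query, which is exactly what the hasTimezone matcher does. A compact sketch, same package-access assumption as above:)

```java
import java.time.ZoneId;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQueries;
import java.util.Map;
import java.util.Set;

class ZoneSketch {
    static ZoneId zoneOf(String text) {
        TemporalAccessor parsed = new Iso8601Parser(Set.of(), true, Map.of()).tryParse(text, null).result();
        // Offsets ("+05:30", "Z") and region IDs ("Europe/Paris") both surface here.
        return parsed.query(TemporalQueries.zone());
    }
}
```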
assertThat(defaultParser().tryParse("2023-01-01T12:00:00Europe/Paris", null), hasTimezone(ZoneId.of("Europe/Paris"))); + + // we could be more specific in the error index for invalid timezones, + // but that would require keeping track & propagating Result objects within date-time parsing just for the ZoneId + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+04:0030", null), hasError(19)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+0400:30", null), hasError(19)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Invalid", null), hasError(19)); + } + + private static void assertEquivalent(String text, DateTimeFormatter formatter) { + TemporalAccessor expected = formatter.parse(text); + TemporalAccessor actual = defaultParser().tryParse(text, null).result(); + assertThat(actual, is(notNullValue())); + + assertThat(actual.query(TemporalQueries.localDate()), equalTo(expected.query(TemporalQueries.localDate()))); + assertThat(actual.query(TemporalQueries.localTime()), equalTo(expected.query(TemporalQueries.localTime()))); + assertThat(actual.query(TemporalQueries.zone()), equalTo(expected.query(TemporalQueries.zone()))); + } + + private static void assertEquivalentFailure(String text, DateTimeFormatter formatter) { + DateTimeParseException expected = expectThrows(DateTimeParseException.class, () -> formatter.parse(text)); + int error = defaultParser().tryParse(text, null).errorIndex(); + assertThat(error, greaterThanOrEqualTo(0)); + + assertThat(error, equalTo(expected.getErrorIndex())); + } + + public void testEquivalence() { + // test that Iso8601Parser produces the same output as DateTimeFormatter + DateTimeFormatter mandatoryFormatter = new DateTimeFormatterBuilder().append(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .appendOffset("+HHmm", "Z") + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + // just checking timezones/ids here + assertEquivalent("2023-01-01T12:00:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Z", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UT", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UTC", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00GMT", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+0500", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05:00:30", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-07", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-0715", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-07:15", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UTC+05:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00GMT-09:45:30", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Zulu", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Europe/Paris", mandatoryFormatter); + + assertEquivalentFailure("2023-01-01T12:00:00+5", mandatoryFormatter); + assertEquivalentFailure("2023-01-01T12:00:00-7", mandatoryFormatter); + assertEquivalentFailure("2023-01-01T12:00:00InvalidTimeZone", mandatoryFormatter); + + DateTimeFormatter allFieldsOptional = new 
DateTimeFormatterBuilder().appendValue(YEAR, 4, 4, SignStyle.EXCEEDS_PAD) + .optionalStart() + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .optionalStart() + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .optionalStart() + .appendLiteral('T') + .appendValue(HOUR_OF_DAY, 2) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .appendOffset("+HHmm", "Z") + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + assertEquivalent("2023", allFieldsOptional); + assertEquivalent("2023-04", allFieldsOptional); + assertEquivalent("2023-04-08", allFieldsOptional); + assertEquivalent("2023-04-08T13", allFieldsOptional); + assertEquivalent("2023-04-08T13:45", allFieldsOptional); + assertEquivalent("2023-04-08T13:45:50", allFieldsOptional); + assertEquivalent("-2023-04-08T13:45:50", allFieldsOptional); + } + + private static int randomValue(ValueRange range) { + assert range.isIntValue(); + return randomIntBetween((int) range.getMinimum(), (int) range.getMaximum()); + } + + public void testDefaults() { + Map defaults = Map.of( + MONTH_OF_YEAR, + randomValue(MONTH_OF_YEAR.range()), + DAY_OF_MONTH, + randomValue(DAY_OF_MONTH.range()), + HOUR_OF_DAY, + randomValue(HOUR_OF_DAY.range()), + MINUTE_OF_HOUR, + randomValue(MINUTE_OF_HOUR.range()), + SECOND_OF_MINUTE, + randomValue(SECOND_OF_MINUTE.range()), + NANO_OF_SECOND, + randomValue(NANO_OF_SECOND.range()) + ); + + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023", null), + hasResult( + new DateTime( + 2023, + defaults.get(MONTH_OF_YEAR), + defaults.get(DAY_OF_MONTH), + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01", null), + hasResult( + new DateTime( + 2023, + 1, + defaults.get(DAY_OF_MONTH), + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01", null), + hasResult( + new DateTime( + 2023, + 1, + 1, + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00", null), + hasResult( + new DateTime( + 2023, + 1, + 1, + 0, + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, defaults.get(SECOND_OF_MINUTE), defaults.get(NANO_OF_SECOND), null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00:00", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, 0, defaults.get(NANO_OF_SECOND), null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00:00.0", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, 0, 0, null, null)) + ); + } +} diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 229e2e6f72cc1..bab046d41b6e5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -68,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index 6df9fd1f35f52..c02df8336a66d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -164,7 +164,15 @@ private static FieldMapper createFieldMapper(String parent, String name) { } private static ObjectMapper createObjectMapper(String name) { - return new ObjectMapper(name, name, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, ObjectMapper.Dynamic.FALSE, emptyMap()); + return new ObjectMapper( + name, + name, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, + ObjectMapper.Dynamic.FALSE, + emptyMap() + ); } private static NestedObjectMapper createNestedObjectMapper(String name) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index 0308dac5fa216..65fa4e236bafc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -82,6 +82,7 @@ public void testSubfieldOverride() { "object", Explicit.EXPLICIT_TRUE, Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, ObjectMapper.Dynamic.TRUE, Collections.singletonMap("object.subfield", fieldMapper) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 154132c772927..69848e3b93f90 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -165,6 +165,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertNotNull(objectMapper); assertFalse(objectMapper.isEnabled()); assertTrue(objectMapper.subobjects()); + assertFalse(objectMapper.trackArraySource()); // Setting 'enabled' to true is allowed, and updates the mapping. 
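(Context for the constructor churn in the mapper tests above: ObjectMapper gains an Explicit<Boolean> parameter between the subobjects and dynamic arguments, apparently the stored-array-source flag that is surfaced in mappings via ObjectMapper.STORE_ARRAY_SOURCE_PARAM and read back through trackArraySource(), and SourceFieldMapper.Builder gains a trailing boolean. A fragment mirroring the fixture updates; the parameter semantics are inferred from the assertions below:)

```java
// Inside a mapper test; emptyMap() is java.util.Collections.emptyMap().
ObjectMapper mapper = new ObjectMapper(
    "labels",                  // leaf name
    "labels",                  // full path
    Explicit.IMPLICIT_TRUE,    // enabled
    Explicit.IMPLICIT_TRUE,    // subobjects
    Explicit.IMPLICIT_FALSE,   // new: track (store) array source
    ObjectMapper.Dynamic.FALSE,
    emptyMap()
);
assert mapper.trackArraySource() == false;
```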
update = Strings.toString( @@ -175,6 +176,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { .field("type", "object") .field("enabled", true) .field("subobjects", false) + .field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true) .endObject() .endObject() .endObject() @@ -185,6 +187,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertNotNull(objectMapper); assertTrue(objectMapper.isEnabled()); assertFalse(objectMapper.subobjects()); + assertTrue(objectMapper.trackArraySource()); } public void testFieldReplacementForIndexTemplates() throws IOException { @@ -573,6 +576,7 @@ private ObjectMapper createObjectMapperWithAllParametersSet(CheckedConsumer true, + topMapping(b -> b.startObject("_source").field("enabled", false).endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("includes", "foo").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 3085ff89603ce..e541c680ada1b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 72abe322c702b..efa46443e2da0 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -4000,8 +4000,8 @@ static boolean hasCircularReference(Exception cause) { return false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108321") public void testDisabledFsync() throws IOException { + var translogDir = 
createTempDir(); var config = new TranslogConfig( shardId, translogDir, diff --git a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java index cb57096d02744..ed9a7427f14f8 100644 --- a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java +++ b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.Task; @@ -69,8 +70,6 @@ public static class Request extends MasterNodeRequest { private String key; private String value; - Request() {} - Request(StreamInput in) throws IOException { super(in); index = in.readString(); @@ -79,6 +78,7 @@ public static class Request extends MasterNodeRequest { } public Request(final String index, final String key, final String value) { + super(TimeValue.THIRTY_SECONDS); this.index = index; this.key = key; this.value = value; diff --git a/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java b/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java index a0ad31c65c8b8..c92b0b0bf15d2 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.settings.InternalOrPrivateSettingsPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -43,6 +44,10 @@ public ValidRequest fromXContent(XContentParser parser) throws IOException { } static class ValidRequest extends MasterNodeRequest { + ValidRequest() { + super(TimeValue.THIRTY_SECONDS); + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 53ca55f8a5f81..aca5d2cbee2c9 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.reservedstate.service; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +55,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public class FileSettingsServiceTests extends ESTestCase { private Environment env; private ClusterService clusterService; @@ -234,54 +233,11 @@ public void 
testStopWorksInMiddleOfProcessing() throws Exception { return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); }).when(spiedController).parse(any(String.class), any()); - service.start(); - service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - assertTrue(service.watching()); - - Files.createDirectories(service.watchedFileDir()); - - // Make some fake settings file to cause the file settings service to process it - writeTestFile(service.watchedFile(), "{}"); - - // we need to wait a bit, on MacOS it may take up to 10 seconds for the Java watcher service to notice the file, - // on Linux is instantaneous. Windows is instantaneous too. - assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - - // Stopping the service should interrupt the watcher thread, we should be able to stop - service.stop(); - assertFalse(service.watching()); - service.close(); - // let the deadlocked thread end, so we can cleanly exit the test - deadThreadLatch.countDown(); - } - - public void testStopWorksIfProcessingDidntReturnYet() throws Exception { - var spiedController = spy(controller); - var service = new FileSettingsService(clusterService, spiedController, env); - - CountDownLatch processFileLatch = new CountDownLatch(1); - CountDownLatch deadThreadLatch = new CountDownLatch(1); - - doAnswer((Answer) invocation -> { - // allow the other thread to continue, but hold on a bit to avoid - // completing the task immediately in the main watcher loop - try { - Thread.sleep(1_000); - } catch (InterruptedException e) { - // pass it on - Thread.currentThread().interrupt(); - } - processFileLatch.countDown(); - new Thread(() -> { - // Simulate a thread that never allows the completion to complete - try { - deadThreadLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); - }).when(spiedController).parse(any(String.class), any()); + doAnswer((Answer) invocation -> { + var completionListener = invocation.getArgument(1, ActionListener.class); + completionListener.onResponse(null); + return null; + }).when(spiedController).initEmpty(any(String.class), any()); service.start(); service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); @@ -296,7 +252,7 @@ public void testStopWorksIfProcessingDidntReturnYet() throws Exception { // on Linux is instantaneous. Windows is instantaneous too. assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - // Stopping the service should interrupt the watcher thread, allowing the whole thing to exit + // Stopping the service should interrupt the watcher thread, we should be able to stop service.stop(); assertFalse(service.watching()); service.close(); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java new file mode 100644 index 0000000000000..d887d7edb19f2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.sameInstance; + +public class ReservedStateUpdateTaskTests extends ESTestCase { + public void testBlockedClusterState() { + var task = new ReservedStateUpdateTask("dummy", null, List.of(), Map.of(), List.of(), e -> {}, ActionListener.noop()); + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + assertThat(task.execute(notRecoveredClusterState), sameInstance(notRecoveredClusterState)); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 3a4d67ae281f2..2b8bf0dad65fe 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -52,6 +52,27 @@ public void testBasicFiltering() throws IOException { assertEquals(Collections.singletonMap("field1", "value"), hitContext.hit().getSourceAsMap()); } + public void testExcludesAll() throws IOException { + XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field1", "value").field("field2", "value2").endObject(); + HitContext hitContext = hitExecute(source, false, null, null); + assertNull(hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "field1", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, null, "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "*", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, new String[] { "field1", "field2" }, new String[] { "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, null, new String[] { "field2", "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + } + public void testMultipleFiltering() throws IOException { XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").field("field2", "value2").endObject(); HitContext hitContext = hitExecuteMultiple(source, true, new String[] { "*.notexisting", "field" }, null); diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 38f8ad4766b7e..e693f9a1562fd 100644 --- 
a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,7 +269,7 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108104") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-serverless/issues/1874") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java index 187a8b6e4eab2..023305101f4c4 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java @@ -22,6 +22,9 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import static org.elasticsearch.test.ESTestCase.fail; public class ActionTestUtils { @@ -77,6 +80,27 @@ public static ActionListener assertNoFailureListener(CheckedConsumer ActionListener assertNoSuccessListener(Consumer consumer) { + return new ActionListener<>() { + @Override + public void onResponse(T result) { + fail(null, "unexpected success with result [%s] while expecting to handle failure with [%s]", result, consumer); + } + + @Override + public void onFailure(Exception e) { + try { + consumer.accept(e); + } catch (Exception e2) { + if (e2 != e) { + e2.addSuppressed(e); + } + fail(e2, "unexpected failure in onFailure handler for [%s]", consumer); + } + } + }; + } + public static ResponseListener wrapAsRestResponseListener(ActionListener listener) { return new ResponseListener() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java b/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java index 115ea63fb243e..dad0e3b613efb 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java @@ -26,6 +26,7 @@ import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.action.support.ActionTestUtils.assertNoFailureListener; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.test.ESIntegTestCase.internalCluster; import static org.elasticsearch.test.ESTestCase.asInstanceOf; import static org.elasticsearch.test.ESTestCase.randomInt; @@ -37,7 +38,6 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; /** * Utility plugin that captures the invocation of an action on a node after the task has been registered with the {@link TaskManager}, @@ -128,19 +128,11 @@ public void app if (capturingListener != null) { final var cancellableTask = asInstanceOf(CancellableTask.class, task); capturingListener.addListener(assertNoFailureListener(captured -> { - cancellableTask.addListener(() -> chain.proceed(task, action, request, new ActionListener<>() { - @Override 
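For context on the refactoring in this hunk: the assertNoSuccessListener helper added above (its full signature is <T> ActionListener<T> assertNoSuccessListener(Consumer<Exception>)) fails the test on any onResponse call and hands onFailure exceptions to the supplied consumer. A minimal sketch of its behaviour inside a test method, assuming the static imports this file already uses (unwrapCause, assertThat, instanceOf); the exception type mirrors the cancellation check below:

    // Sketch: this listener must only ever be failed, and the failure must be a cancellation.
    ActionListener<Void> listener = ActionTestUtils.assertNoSuccessListener(
        e -> assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class))
    );
    listener.onFailure(new TaskCancelledException("task cancelled")); // the consumer accepts this
    // listener.onResponse(null) would instead fail the test
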
- public void onResponse(Response response) { - fail("cancelled action should not succeed, but got " + response); - } - - @Override - public void onFailure(Exception e) { - assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class)); - listener.onFailure(e); - captured.countDownLatch().countDown(); - } - })); + cancellableTask.addListener(() -> chain.proceed(task, action, request, assertNoSuccessListener(e -> { + assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class)); + listener.onFailure(e); + captured.countDownLatch().countDown(); + }))); assertFalse(cancellableTask.isCancelled()); captured.doCancel().run(); })); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index bbbafef514e30..c78ed54c13d8f 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -131,13 +131,10 @@ public static DataStream newInstance( @Nullable DataStreamLifecycle lifecycle, @Nullable DataStreamAutoShardingEvent autoShardingEvent ) { - return DataStream.builder(name, indices) - .setGeneration(generation) - .setMetadata(metadata) - .setReplicated(replicated) - .setLifecycle(lifecycle) - .setAutoShardingEvent(autoShardingEvent) - .build(); + return DataStream.builder( + name, + DataStream.DataStreamIndices.backingIndicesBuilder(indices).setAutoShardingEvent(autoShardingEvent).build() + ).setGeneration(generation).setMetadata(metadata).setReplicated(replicated).setLifecycle(lifecycle).build(); } public static DataStream newInstance( @@ -155,7 +152,7 @@ public static DataStream newInstance( .setReplicated(replicated) .setLifecycle(lifecycle) .setFailureStoreEnabled(failureStores.isEmpty() == false) - .setFailureIndices(failureStores) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); } @@ -341,7 +338,6 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time boolean replicated = randomBoolean(); return new DataStream( dataStreamName, - indices, generation, metadata, randomBoolean(), @@ -352,15 +348,30 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time randomBoolean() ? IndexMode.STANDARD : null, // IndexMode.TIME_SERIES triggers validation that many unit tests doesn't pass randomBoolean() ? DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() : null, failureStore, - failureIndices, - replicated == false && randomBoolean(), - randomBoolean() - ? new DataStreamAutoShardingEvent( - indices.get(indices.size() - 1).getName(), - randomIntBetween(1, 10), - randomMillisUpToYear9999() + DataStream.DataStreamIndices.backingIndicesBuilder(indices) + .setRolloverOnWrite(replicated == false && randomBoolean()) + .setAutoShardingEvent( + randomBoolean() + ? new DataStreamAutoShardingEvent( + indices.get(indices.size() - 1).getName(), + randomIntBetween(1, 10), + randomMillisUpToYear9999() + ) + : null ) - : null + .build(), + DataStream.DataStreamIndices.failureIndicesBuilder(failureIndices) + .setRolloverOnWrite(failureStore && replicated == false && randomBoolean()) + .setAutoShardingEvent( + failureStore && randomBoolean() + ? 
new DataStreamAutoShardingEvent( + indices.get(indices.size() - 1).getName(), + randomIntBetween(1, 10), + randomMillisUpToYear9999() + ) + : null + ) + .build() ); } diff --git a/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java index 2b4e7fd4c7517..63b7dd88cb44e 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java @@ -74,36 +74,45 @@ protected byte[] randomReadAndSlice(IndexInput indexInput, int length) throws IO switch (readStrategy) { case 0, 1, 2, 3: if (length - readPos >= Long.BYTES && readStrategy <= 0) { - long read = indexInput.readLong(); - ByteBuffer.wrap(output, readPos, Long.BYTES).order(ByteOrder.LITTLE_ENDIAN).putLong(read); + ByteBuffer.wrap(output, readPos, Long.BYTES).order(ByteOrder.LITTLE_ENDIAN).putLong(indexInput.readLong()); readPos += Long.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readLong(indexInput.getFilePointer() - Long.BYTES)); - indexInput.seek(readPos); - } } else if (length - readPos >= Integer.BYTES && readStrategy <= 1) { - int read = indexInput.readInt(); - ByteBuffer.wrap(output, readPos, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).putInt(read); + ByteBuffer.wrap(output, readPos, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).putInt(indexInput.readInt()); readPos += Integer.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readInt(indexInput.getFilePointer() - Integer.BYTES)); - indexInput.seek(readPos); - } } else if (length - readPos >= Short.BYTES && readStrategy <= 2) { - short read = indexInput.readShort(); - ByteBuffer.wrap(output, readPos, Short.BYTES).order(ByteOrder.LITTLE_ENDIAN).putShort(read); + ByteBuffer.wrap(output, readPos, Short.BYTES).order(ByteOrder.LITTLE_ENDIAN).putShort(indexInput.readShort()); readPos += Short.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readShort(indexInput.getFilePointer() - Short.BYTES)); - indexInput.seek(readPos); - } } else { - byte read = indexInput.readByte(); - output[readPos++] = read; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readByte(indexInput.getFilePointer() - 1)); + output[readPos++] = indexInput.readByte(); + } + if (indexInput instanceof RandomAccessInput randomAccessInput && randomBoolean()) { + final var randomAccessReadStart = between(0, length - 1); + final int randomAccessReadEnd; + if (length - randomAccessReadStart >= Long.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Long.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putLong(randomAccessInput.readLong(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Long.BYTES; + } else if (length - randomAccessReadStart >= Integer.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Integer.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putInt(randomAccessInput.readInt(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Integer.BYTES; + } else if (length - randomAccessReadStart >= Short.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Short.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + 
.putShort(randomAccessInput.readShort(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Short.BYTES; + } else { + output[randomAccessReadStart] = randomAccessInput.readByte(randomAccessReadStart); + randomAccessReadEnd = randomAccessReadStart + 1; + } + if (randomAccessReadStart <= readPos && readPos <= randomAccessReadEnd && randomBoolean()) { + readPos = between(readPos, randomAccessReadEnd); indexInput.seek(readPos); } + + indexInput.seek(readPos); // BUG these random-access reads shouldn't affect the current position } break; case 4: diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index bea222a9d8341..80f9f2abea184 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; @@ -259,6 +260,7 @@ public static void resetPortCounter() { // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); + MockLogAppender.init(); final List testAppenders = new ArrayList<>(3); for (String leakLoggerName : Arrays.asList("io.netty.util.ResourceLeakDetector", LeakTracker.class.getName())) { @@ -1058,6 +1060,11 @@ public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } + public static SecureString randomSecureStringOfLength(int codeUnits) { + var randomAlpha = randomAlphaOfLength(codeUnits); + return new SecureString(randomAlpha.toCharArray()); + } + public static String randomNullOrAlphaOfLength(int codeUnits) { return randomBoolean() ? null : randomAlphaOfLength(codeUnits); } @@ -2094,9 +2101,24 @@ protected static SecureRandom secureRandomFips(final byte[] seed) throws NoSuchA return secureRandomFips; } + /** + * The timeout used for the various "safe" wait methods such as {@link #safeAwait} and {@link #safeAcquire}. In tests we generally want + * these things to complete almost immediately, but sometimes the CI runner executes things rather slowly so we use {@code 10s} as a + * fairly relaxed definition of "immediately". + *
<p>
+ * A well-designed test should not need to wait for anything close to this duration when run in isolation. If you think you need to do + * so, instead seek a better way to write the test such that it does not need to wait for so long. Tests that take multiple seconds to + * complete are a big drag on CI times which slows everyone down. + *
<p>
+ * For instance, tests which verify things that require the passage of time ought to simulate this (e.g. using a {@link + * org.elasticsearch.common.util.concurrent.DeterministicTaskQueue}). Excessive busy-waits ought to be replaced by blocking waits (e.g. + * using a {@link CountDownLatch}) which release as soon as the condition is satisfied. + */ + public static final TimeValue SAFE_AWAIT_TIMEOUT = TimeValue.timeValueSeconds(10); + public static void safeAwait(CyclicBarrier barrier) { try { - barrier.await(10, TimeUnit.SECONDS); + barrier.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAwait: interrupted waiting for CyclicBarrier release"); @@ -2107,7 +2129,10 @@ public static void safeAwait(CyclicBarrier barrier) { public static void safeAwait(CountDownLatch countDownLatch) { try { - assertTrue("safeAwait: CountDownLatch did not reach zero within the timeout", countDownLatch.await(10, TimeUnit.SECONDS)); + assertTrue( + "safeAwait: CountDownLatch did not reach zero within the timeout", + countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAwait: interrupted waiting for CountDownLatch to reach zero"); @@ -2116,7 +2141,10 @@ public static void safeAwait(CountDownLatch countDownLatch) { public static void safeAcquire(Semaphore semaphore) { try { - assertTrue("safeAcquire: Semaphore did not acquire permit within the timeout", semaphore.tryAcquire(10, TimeUnit.SECONDS)); + assertTrue( + "safeAcquire: Semaphore did not acquire permit within the timeout", + semaphore.tryAcquire(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAcquire: interrupted waiting for Semaphore to acquire permit"); @@ -2127,7 +2155,7 @@ public static T safeAwait(SubscribableListener listener) { final var future = new PlainActionFuture(); listener.addListener(future); try { - return future.get(10, TimeUnit.SECONDS); + return future.get(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new AssertionError("safeAwait: interrupted waiting for SubscribableListener", e); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 10a3a8a78e483..dd7987642c58a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Property; @@ -19,9 +18,10 @@ import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; @@ -31,12 +31,60 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender extends AbstractAppender { +public class 
MockLogAppender implements Releasable { + private static final Map<String, List<MockLogAppender>> mockAppenders = new ConcurrentHashMap<>(); + private static final RealMockAppender parent = new RealMockAppender(); + // TODO: this can become final once the ctor is made private + private List<String> loggers = List.of(); private final List<WrappedLoggingExpectation> expectations; + private volatile boolean isAlive = true; + + @Override + public void close() { + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? null : v; + }); + } + // check that all expectations have been evaluated before this is released + for (WrappedLoggingExpectation expectation : expectations) { + assertThat( + "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, + expectation.assertMatchedCalled, + is(true) + ); + } + } + + private static class RealMockAppender extends AbstractAppender { + + RealMockAppender() { + super("mock", null, null, false, Property.EMPTY_ARRAY); + } + + @Override + public void append(LogEvent event) { + List<MockLogAppender> appenders = mockAppenders.get(event.getLoggerName()); + if (appenders == null) { + // check if there is a root appender + appenders = mockAppenders.getOrDefault("", List.of()); + } + for (MockLogAppender appender : appenders) { + if (appender.isAlive == false) { + continue; + } + for (LoggingExpectation expectation : appender.expectations) { + expectation.match(event); + } + } + } + } public MockLogAppender() { - super("mock", null, null, false, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a * modification from #addExpectation. */ expectations = new CopyOnWriteArrayList<>(); } - public void addExpectation(LoggingExpectation expectation) { - expectations.add(new WrappedLoggingExpectation(expectation)); + private MockLogAppender(List<String> loggers) { + this(); + this.loggers = loggers; } - @Override - public void append(LogEvent event) { - for (LoggingExpectation expectation : expectations) { - expectation.match(event); - } + /** + * Initialize the mock log appender with the log4j system. + */ + public static void init() { + parent.start(); + Loggers.addAppender(LogManager.getLogger(""), parent); + } + + public void addExpectation(LoggingExpectation expectation) { + expectations.add(new WrappedLoggingExpectation(expectation)); } public void assertAllExpectationsMatched() { @@ -213,7 +267,7 @@ public void assertMatched() { */ private static class WrappedLoggingExpectation implements LoggingExpectation { - private final AtomicBoolean assertMatchedCalled = new AtomicBoolean(false); + private volatile boolean assertMatchedCalled = false; private final LoggingExpectation delegate; private WrappedLoggingExpectation(LoggingExpectation delegate) { @@ -230,7 +284,7 @@ public void assertMatched() { try { delegate.assertMatched(); } finally { - assertMatchedCalled.set(true); + assertMatchedCalled = true; } } @@ -240,49 +294,57 @@ public String toString() { } } + public Releasable capturing(Class<?>... classes) { + this.loggers = Arrays.stream(classes).map(Class::getCanonicalName).toList(); + addToMockAppenders(this, loggers); + return this; + } + + public Releasable capturing(String...
names) { + this.loggers = Arrays.asList(names); + addToMockAppenders(this, loggers); + return this; + } + /** * Adds the list of class loggers to this {@link MockLogAppender}. * - * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ - public Releasable capturing(Class<?>... classes) { - return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); + public static MockLogAppender capture(Class<?>... classes) { + return create(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ - public Releasable capturing(String... names) { - return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); + public static MockLogAppender capture(String... names) { + return create(Arrays.asList(names)); } - private Releasable appendToLoggers(List<Logger> loggers) { - start(); - for (final var logger : loggers) { - Loggers.addAppender(logger, this); + private static MockLogAppender create(List<String> loggers) { + MockLogAppender appender = new MockLogAppender(loggers); + addToMockAppenders(appender, loggers); + return appender; + } + + private static void addToMockAppenders(MockLogAppender appender, List<String> loggers) { + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + if (v == null) { + v = new CopyOnWriteArrayList<>(); + } + v.add(appender); + return v; + }); } - return () -> { - for (final var logger : loggers) { - Loggers.removeAppender(logger, this); - } - stop(); - // check that all expectations have been evaluated before this is released - for (WrappedLoggingExpectation expectation : expectations) { - assertThat( - "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled.get(), - is(true) - ); - } - }; } /** * Executes an action and verifies expectations against the provided logger */ public static void assertThatLogger(Runnable action, Class<?> loggerOwner, MockLogAppender.LoggingExpectation expectation) { - MockLogAppender mockAppender = new MockLogAppender(); - try (var ignored = mockAppender.capturing(loggerOwner)) { + try (var mockAppender = MockLogAppender.capture(loggerOwner)) { mockAppender.addExpectation(expectation); action.run(); mockAppender.assertAllExpectationsMatched(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index 40cdacb767d0f..e05c2dde930a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -354,5 +354,10 @@ public <T> T get(String path) { } return (T) context; } + + @Override + public String toString() { + return "JsonMapView{map=" + map + '}'; + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index fd3ba7d864f99..6dfd51c0bee5e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -105,6 +105,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import 
java.util.EnumSet; import java.util.HashMap; @@ -261,6 +262,43 @@ protected static Set readVersionsFromNodesInfo(RestClient adminClient) t .collect(Collectors.toUnmodifiableMap(entry -> entry.getKey().toString(), entry -> (Map) entry.getValue())); } + /** + * Does the cluster being tested support the set of capabilities + * for specified path and method. + */ + protected static Optional clusterHasCapability( + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + return clusterHasCapability(adminClient, method, path, parameters, capabilities); + } + + /** + * Does the cluster on the other side of {@code client} support the set + * of capabilities for specified path and method. + */ + protected static Optional clusterHasCapability( + RestClient client, + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + Request request = new Request("GET", "_capabilities"); + request.addParameter("method", method); + request.addParameter("path", path); + if (parameters.isEmpty() == false) { + request.addParameter("parameters", String.join(",", parameters)); + } + if (capabilities.isEmpty() == false) { + request.addParameter("capabilities", String.join(",", capabilities)); + } + Map response = entityAsMap(client.performRequest(request).getEntity()); + return Optional.ofNullable((Boolean) response.get("supported")); + } + protected static boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index ee7687398cf7b..89d10acb6ec45 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -1319,8 +1319,7 @@ public void handleException(TransportException exp) {} .build() ); - MockLogAppender appender = new MockLogAppender(); - try (var ignored = appender.capturing("org.elasticsearch.transport.TransportService.tracer")) { + try (var appender = MockLogAppender.capture("org.elasticsearch.transport.TransportService.tracer")) { //////////////////////////////////////////////////////////////////////// // tests for included action type "internal:test" diff --git a/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java new file mode 100644 index 0000000000000..4973bb83311bc --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class MockLogAppenderTests extends ESTestCase { + + public void testConcurrentLogAndLifecycle() throws Exception { + Logger logger = LogManager.getLogger(MockLogAppenderTests.class); + final var keepGoing = new AtomicBoolean(true); + final var logThread = new Thread(() -> { + while (keepGoing.get()) { + logger.info("test"); + } + }); + logThread.start(); + + final var appender = new MockLogAppender(); + for (int i = 0; i < 1000; i++) { + try (var ignored = appender.capturing(MockLogAppenderTests.class)) { + Thread.yield(); + } + } + + keepGoing.set(false); + logThread.join(); + } +} diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 49fb38b518dce..d555337f467ae 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,8 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); + FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), + SEMANTIC_TEXT_ENABLED("es.semantic_text_feature_flag_enabled=true", Version.fromString("8.15.0"), null); public final String systemProperty; public final Version from; diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java index 718c9c1bb0042..5292d917df630 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java @@ -65,6 +65,11 @@ public DefaultLocalClusterHandle(String name, List nodes) { this.nodes = nodes; } + @Override + public int getNumNodes() { + return nodes.size(); + } + @Override public void start() { if (started.getAndSet(true) == false) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index 77b73e7b6ce86..7b24709b18a90 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java @@ -54,6 +54,11 @@ public void evaluate() throws Throwable { }; } + @Override + public int getNumNodes() { + return handle.getNumNodes(); + } + @Override public void start() { checkHandle(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index 7a95d682e9ddc..acb9ef77b9e41 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -16,6 +16,12 @@ import java.io.InputStream; public interface LocalClusterHandle extends ClusterHandle { + + /** + * Returns the number of nodes that are part of this cluster. + */ + int getNumNodes(); + /** * Stops the node at a given index. * @param index of the node to stop diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 10bf2fb4b0a9f..4954065369ad9 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -16,7 +16,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.NodeSelector; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.test.rest.Stash; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; @@ -25,14 +27,19 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.function.BiPredicate; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with elasticsearch. 
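Before the YAML-runner changes below, note the shape of the capability probe that ESRestTestCase now exposes (added further up): it issues GET _capabilities with the method, path, parameters, and capabilities as query parameters, and returns Optional.empty() when no verdict is available. A sketch of a Java REST test gating itself on a capability; the endpoint and capability names here are invented for illustration:

    // Sketch: skip (rather than fail) when the cluster does not advertise the capability.
    public void testWithExampleCapability() throws IOException {
        boolean supported = clusterHasCapability("GET", "/_example/endpoint", List.of(), List.of("example_capability"))
            .orElse(false); // Optional.empty() means the capabilities API itself is unsupported
        assumeTrue("cluster does not support example_capability", supported);
        // ... exercise the capability-dependent behaviour here ...
    }
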
@@ -122,7 +129,15 @@ public ClientYamlTestResponse callApi( ) throws IOException { // makes a copy of the parameters before modifying them for this specific request Map<String, String> requestParams = new HashMap<>(params); - requestParams.putIfAbsent("error_trace", "true"); // By default ask for error traces, this my be overridden by params + requestParams.compute("error_trace", (k, v) -> { + if (v == null) { + return "true"; // By default ask for error traces, this may be overridden by params + } else if (v.equals("false")) { + return null; + } else { + return v; + } + }); for (Map.Entry<String, String> entry : requestParams.entrySet()) { if (stash.containsStashedValue(entry.getValue())) { entry.setValue(stash.getValue(entry.getValue()).toString()); @@ -264,4 +279,30 @@ public ClientYamlTestCandidate getClientYamlTestCandidate() { public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } + + public Optional<Boolean> clusterHasCapabilities(String method, String path, String parametersString, String capabilitiesString) { + Map<String, String> params = Maps.newMapWithExpectedSize(5); + params.put("method", method); + params.put("path", path); + if (Strings.hasLength(parametersString)) { + params.put("parameters", parametersString); + } + if (Strings.hasLength(capabilitiesString)) { + params.put("capabilities", capabilitiesString); + } + params.put("error_trace", "false"); // disable error trace + try { + ClientYamlTestResponse resp = callApi("capabilities", params, emptyList(), emptyMap()); + // anything other than 200 should result in an exception, handled below + assert resp.getStatusCode() == 200 : "Unknown response code " + resp.getStatusCode(); + return Optional.ofNullable(resp.evaluate("supported")); + } catch (ClientYamlTestResponseException responseException) { + if (responseException.getRestTestResponse().getStatusCode() / 100 == 4) { + return Optional.empty(); // we don't know, the capabilities API is unsupported + } + throw new UncheckedIOException(responseException); + } catch (IOException ioException) { + throw new UncheckedIOException(ioException); + } + } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java index 1ee447da1f111..c12de7e1155a7 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -27,6 +28,7 @@ import java.util.function.Predicate; import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.joining; /** * Represents a section where prerequisites to run a specific test section or suite are specified.
It is possible to specify preconditions @@ -43,16 +45,23 @@ record KnownIssue(String clusterFeature, String fixedBy) { private static final Set FIELD_NAMES = Set.of("cluster_feature", "fixed_by"); } + record CapabilitiesCheck(String method, String path, String parameters, String capabilities) { + private static final Set FIELD_NAMES = Set.of("method", "path", "parameters", "capabilities"); + } + static class PrerequisiteSectionBuilder { - String skipVersionRange = null; String skipReason = null; - String requiresReason = null; - List requiredYamlRunnerFeatures = new ArrayList<>(); + String skipVersionRange = null; List skipOperatingSystems = new ArrayList<>(); List skipKnownIssues = new ArrayList<>(); String skipAwaitsFix = null; Set skipClusterFeatures = new HashSet<>(); + List skipCapabilities = new ArrayList<>(); + + String requiresReason = null; + List requiredYamlRunnerFeatures = new ArrayList<>(); Set requiredClusterFeatures = new HashSet<>(); + List requiredCapabilities = new ArrayList<>(); enum XPackRequired { NOT_SPECIFIED, @@ -116,11 +125,21 @@ public PrerequisiteSectionBuilder skipKnownIssue(KnownIssue knownIssue) { return this; } + public PrerequisiteSectionBuilder skipIfCapabilities(CapabilitiesCheck capabilitiesCheck) { + skipCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder requireClusterFeature(String featureName) { requiredClusterFeatures.add(featureName); return this; } + public PrerequisiteSectionBuilder requireCapabilities(CapabilitiesCheck capabilitiesCheck) { + requiredCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder skipIfOs(String osName) { this.skipOperatingSystems.add(osName); return this; @@ -128,13 +147,15 @@ public PrerequisiteSectionBuilder skipIfOs(String osName) { void validate(XContentLocation contentLocation) { if ((Strings.isEmpty(skipVersionRange)) - && requiredYamlRunnerFeatures.isEmpty() && skipOperatingSystems.isEmpty() - && xpackRequired == XPackRequired.NOT_SPECIFIED - && requiredClusterFeatures.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty() - && Strings.isEmpty(skipAwaitsFix)) { + && Strings.isEmpty(skipAwaitsFix) + && xpackRequired == XPackRequired.NOT_SPECIFIED + && requiredYamlRunnerFeatures.isEmpty() + && requiredCapabilities.isEmpty() + && requiredClusterFeatures.isEmpty()) { // TODO separate the validation for requires / skip when dropping parsing of legacy fields, e.g. 
features in skip throw new ParsingException(contentLocation, "at least one predicate is mandatory within a skip or requires section"); } @@ -143,11 +164,12 @@ void validate(XContentLocation contentLocation) { && (Strings.isEmpty(skipVersionRange) && skipOperatingSystems.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty()) == false) { throw new ParsingException(contentLocation, "reason is mandatory within this skip section"); } - if (Strings.isEmpty(requiresReason) && (requiredClusterFeatures.isEmpty() == false)) { + if (Strings.isEmpty(requiresReason) && ((requiredClusterFeatures.isEmpty() && requiredCapabilities.isEmpty()) == false)) { throw new ParsingException(contentLocation, "reason is mandatory within this requires section"); } @@ -190,6 +212,13 @@ public PrerequisiteSection build() { if (xpackRequired == XPackRequired.YES) { requiresCriteriaList.add(Prerequisites.hasXPack()); } + if (requiredClusterFeatures.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); + } + if (requiredCapabilities.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireCapabilities(requiredCapabilities)); + } + if (xpackRequired == XPackRequired.NO) { skipCriteriaList.add(Prerequisites.hasXPack()); } @@ -199,12 +228,12 @@ public PrerequisiteSection build() { if (skipOperatingSystems.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnOsList(skipOperatingSystems)); } - if (requiredClusterFeatures.isEmpty() == false) { - requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); - } if (skipClusterFeatures.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnClusterFeatures(skipClusterFeatures)); } + if (skipCapabilities.isEmpty() == false) { + skipCriteriaList.add(Prerequisites.skipCapabilities(skipCapabilities)); + } if (skipKnownIssues.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnKnownIssue(skipKnownIssues)); } @@ -287,6 +316,7 @@ static void parseSkipSection(XContentParser parser, PrerequisiteSectionBuilder b case "os" -> parseStrings(parser, builder::skipIfOs); case "cluster_features" -> parseStrings(parser, builder::skipIfClusterFeature); case "known_issues" -> parseArray(parser, PrerequisiteSection::parseKnownIssue, builder::skipKnownIssue); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::skipIfCapabilities); default -> false; }; } @@ -337,12 +367,47 @@ private static KnownIssue parseKnownIssue(XContentParser parser) throws IOExcept if (fields.keySet().equals(KnownIssue.FIELD_NAMES) == false) { throw new ParsingException( parser.getTokenLocation(), - Strings.format("Expected fields %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) + Strings.format("Expected all of %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) ); } return new KnownIssue(fields.get("cluster_feature"), fields.get("fixed_by")); } + private static CapabilitiesCheck parseCapabilities(XContentParser parser) throws IOException { + Map fields = parser.map(); + if (CapabilitiesCheck.FIELD_NAMES.containsAll(fields.keySet()) == false) { + throw new ParsingException( + parser.getTokenLocation(), + Strings.format("Expected some of %s, but got %s", CapabilitiesCheck.FIELD_NAMES, fields.keySet()) + ); + } + Object path = fields.get("path"); + if (path == null) { + throw new ParsingException(parser.getTokenLocation(), "path is required"); + } + + return new CapabilitiesCheck( + 
ensureString(fields.getOrDefault("method", "GET")), + ensureString(path), + stringArrayAsParamString("parameters", fields), + stringArrayAsParamString("capabilities", fields) + ); + } + + private static String ensureString(Object obj) { + if (obj instanceof String str) return str; + throw new IllegalArgumentException("Expected STRING, but got: " + obj); + } + + private static String stringArrayAsParamString(String name, Map<String, Object> fields) { + Object value = fields.get(name); + if (value == null) return null; + if (value instanceof Collection<?> values) { + return values.stream().map(PrerequisiteSection::ensureString).collect(joining(",")); + } + return ensureString(value); + } + static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException { requireStartObject("requires", parser.nextToken()); @@ -361,6 +426,7 @@ static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuild valid = switch (parser.currentName()) { case "test_runner_features" -> parseStrings(parser, f -> parseFeatureField(f, builder)); case "cluster_features" -> parseStrings(parser, builder::requireClusterFeature); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::requireCapabilities); default -> false; }; } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java index ca10101a4612c..86c035ebad62f 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java @@ -10,8 +10,11 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.function.Predicate; @@ -45,8 +48,23 @@ static Predicate<ClientYamlTestExecutionContext> skipOnClusterFeatures(Set<String> clusterFeatures) { return context -> clusterFeatures.stream().anyMatch(context::clusterHasFeature); } - static Predicate<ClientYamlTestExecutionContext> skipOnKnownIssue(List<PrerequisiteSection.KnownIssue> knownIssues) { + static Predicate<ClientYamlTestExecutionContext> skipOnKnownIssue(List<KnownIssue> knownIssues) { return context -> knownIssues.stream() .anyMatch(i -> context.clusterHasFeature(i.clusterFeature()) && context.clusterHasFeature(i.fixedBy()) == false); } + + static Predicate<ClientYamlTestExecutionContext> requireCapabilities(List<CapabilitiesCheck> checks) { + // requirement not fulfilled if unknown / capabilities API not supported + return context -> checks.stream().allMatch(check -> checkCapabilities(context, check).orElse(false)); + } + + static Predicate<ClientYamlTestExecutionContext> skipCapabilities(List<CapabilitiesCheck> checks) { + // skip if unknown / capabilities API not supported + return context -> checks.stream().anyMatch(check -> checkCapabilities(context, check).orElse(true)); + } + + private static Optional<Boolean> checkCapabilities(ClientYamlTestExecutionContext context, CapabilitiesCheck check) { + return context.clusterHasCapabilities(check.method(), check.path(), check.parameters(), check.capabilities()); + } } diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java index a77b2cc5b40f1..0bb31ae2c574a 100644 --- 
a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.yaml.YamlXContent; @@ -20,8 +21,11 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.contains; @@ -36,6 +40,8 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -357,8 +363,8 @@ public void testParseSkipSectionIncompleteKnownIssues() throws Exception { e.getMessage(), is( oneOf( - ("Expected fields [cluster_feature, fixed_by], but got [cluster_feature]"), - ("Expected fields [fixed_by, cluster_feature], but got [cluster_feature]") + ("Expected all of [cluster_feature, fixed_by], but got [cluster_feature]"), + ("Expected all of [fixed_by, cluster_feature], but got [cluster_feature]") ) ) ); @@ -498,6 +504,42 @@ public void testParseRequireAndSkipSectionsClusterFeatures() throws Exception { assertThat(parser.nextToken(), nullValue()); } + public void testParseRequireAndSkipSectionsCapabilities() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + - requires: + capabilities: + - path: /a + - method: POST + path: /b + parameters: [param1, param2] + - method: PUT + path: /c + capabilities: [a, b, c] + reason: required to run test + - skip: + capabilities: + - path: /d + parameters: param1 + capabilities: a + reason: undesired if supported + """); + + var skipSectionBuilder = PrerequisiteSection.parseInternal(parser); + assertThat(skipSectionBuilder, notNullValue()); + assertThat( + skipSectionBuilder.requiredCapabilities, + contains( + new CapabilitiesCheck("GET", "/a", null, null), + new CapabilitiesCheck("POST", "/b", "param1,param2", null), + new CapabilitiesCheck("PUT", "/c", null, "a,b,c") + ) + ); + assertThat(skipSectionBuilder.skipCapabilities, contains(new CapabilitiesCheck("GET", "/d", "param1", "a"))); + + assertThat(parser.currentToken(), equalTo(XContentParser.Token.END_ARRAY)); + assertThat(parser.nextToken(), nullValue()); + } + public void testParseRequireAndSkipSectionMultipleClusterFeatures() throws Exception { parser = createParser(YamlXContent.yamlXContent, """ - requires: @@ -659,6 +701,43 @@ public void testSkipKnownIssue() { assertFalse(section.skipCriteriaMet(mockContext)); } + public void testEvaluateCapabilities() { + List skipCapabilities = List.of( + new CapabilitiesCheck("GET", "/s", null, "c1,c2"), + new CapabilitiesCheck("GET", "/s", "p1,p2", "c1") + ); + List requiredCapabilities = List.of( + new CapabilitiesCheck("GET", "/r", null, null), + new CapabilitiesCheck("GET", 
"/r", "p1", null) + ); + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipCapabilities(skipCapabilities)), + "skip", + List.of(Prerequisites.requireCapabilities(requiredCapabilities)), + "required", + emptyList() + ); + + var context = mock(ClientYamlTestExecutionContext.class); + + // when the capabilities API is unavailable: + assertTrue(section.skipCriteriaMet(context)); // always skip if unavailable + assertFalse(section.requiresCriteriaMet(context)); // always fail requirements / skip if unavailable + + when(context.clusterHasCapabilities(anyString(), anyString(), any(), any())).thenReturn(Optional.of(FALSE)); + assertFalse(section.skipCriteriaMet(context)); + assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/s", null, "c1,c2")).thenReturn(Optional.of(TRUE)); + assertTrue(section.skipCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", null, null)).thenReturn(Optional.of(TRUE)); + assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", "p1", null)).thenReturn(Optional.of(TRUE)); + assertTrue(section.requiresCriteriaMet(context)); + } + public void evaluateEmpty() { var section = new PrerequisiteSection(List.of(), "unsupported", List.of(), "required", List.of()); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index b8e4f77f7da7b..0cbe3786fc03c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -551,6 +551,11 @@ public void write(XContentBuilder b) throws IOException { b.endObject(); } + + @Override + public String fieldName() { + return name(); + } }; } } diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml index 3ca15224dafc4..75671948de11a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml @@ -8,3 +8,7 @@ template: sort: field: "@timestamp" order: desc + mapping: + ignore_malformed: true + total_fields: + ignore_dynamic_beyond_limit: true diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml index e6c84b6ed06f9..819d5d7eafb8e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml @@ -6,3 +6,9 @@ _meta: template: settings: codec: best_compression + mapping: + # apm@settings sets `ignore_malformed: true`, but we need + # to disable this for metrics since they use synthetic source, + # and this combination is incompatible with the + # aggregate_metric_double field type. 
+ ignore_malformed: false diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 0e27e454f867d..772057d4931a3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 3 +version: 4 component-templates: # Data lifecycle. diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml new file mode 100644 index 0000000000000..97265a9b81a75 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml @@ -0,0 +1,100 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "logs-apm.app@custom" + body: + template: + settings: + mapping: + total_fields: + limit: 20 + +--- +"Test ignore_malformed": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # Passing a (non-coercable) string into a numeric field should not + # cause an indexing failure; it should just not be indexed. + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": "string"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": 123}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["numeric_labels.*", "_ignored"] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields: {"_ignored": ["numeric_labels.key"]} } + - match: { hits.hits.1.fields: {"numeric_labels.key": [123.0]} } + +--- +"Test ignore_dynamic_beyond_limit": + - do: + bulk: + index: logs-apm.app.svc1-testing + refresh: true + body: + - create: {} + - {"@timestamp": "2017-06-22", "k1": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k2": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k3": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k4": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k5": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k6": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k7": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k8": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k9": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k10": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k11": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k12": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k13": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k14": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k15": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k16": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k17": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k18": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k19": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k20": ""} + + - is_false: errors + + - do: + search: + index: logs-apm.app.svc1-testing + body: + query: + term: + _ignored: + value: k20 + - length: { hits.hits: 1 } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java 
b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java index d3be1816924fb..9b44daf6dd427 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java @@ -34,6 +34,7 @@ public String name() { } public Request(final String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = Objects.requireNonNull(name); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java index 4a356f74e03f8..90c2d664b421d 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java @@ -39,6 +39,7 @@ public static class Request extends AcknowledgedRequest roles, final SortedMap deciders) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.roles = roles; this.deciders = deciders; diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 28983fe34df91..2f8cccdc303e6 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -816,7 +816,10 @@ private SingleForecast forecast(Metadata metadata, DataStream stream, long forec Map newIndices = new HashMap<>(); for (int i = 0; i < numberNewIndices; ++i) { final String uuid = UUIDs.randomBase64UUID(); - final Tuple rolledDataStreamInfo = stream.unsafeNextWriteIndexAndGeneration(state.metadata()); + final Tuple rolledDataStreamInfo = stream.unsafeNextWriteIndexAndGeneration( + state.metadata(), + stream.getBackingIndices() + ); stream = stream.unsafeRollover( new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 0a0cade089fab..a0917c1cef815 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -330,11 +330,12 @@ static DataStream updateLocalDataStream( // just copying the data stream is in this case safe. return remoteDataStream.copy() .setName(localDataStreamName) - .setIndices(List.of(backingIndexToFollow)) + .setBackingIndices( + // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense + // (and potentially even break things). 
+ remoteDataStream.getBackingIndices().copy().setIndices(List.of(backingIndexToFollow)).setRolloverOnWrite(false).build() + ) .setReplicated(true) - // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense - // (and potentially even break things). - .setRolloverOnWrite(false) .build(); } else { if (localDataStream.isReplicated() == false) { @@ -376,7 +377,7 @@ static DataStream updateLocalDataStream( } return localDataStream.copy() - .setIndices(backingIndices) + .setBackingIndices(localDataStream.getBackingIndices().copy().setIndices(backingIndices).build()) .setGeneration(remoteDataStream.getGeneration()) .setMetadata(remoteDataStream.getMetadata()) .build(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java index 5883c36c9e2c5..9e8e707db6b86 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java @@ -14,7 +14,9 @@ public class GetBasicStatusRequest extends MasterNodeReadRequest { - public GetBasicStatusRequest() {} + public GetBasicStatusRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetBasicStatusRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java index 93a0206ac70c3..cae967058fb73 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java @@ -14,7 +14,9 @@ public class GetTrialStatusRequest extends MasterNodeReadRequest { - public GetTrialStatusRequest() {} + public GetTrialStatusRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetTrialStatusRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java index 602e521fe10e3..7e9b0ebf44bee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java @@ -16,7 +16,9 @@ public class PostStartBasicRequest extends AcknowledgedRequest) () -> "unexpected failure during [" + TASK_SOURCE + "]", e); + var state = clusterService.lifecycleState(); + if (state == Lifecycle.State.STOPPED || state == Lifecycle.State.CLOSED) { + logger.debug("node shutdown during [" + TASK_SOURCE + "]", e); + } else { + logger.error("unexpected failure during [" + TASK_SOURCE + "]", e); + } } private ClusterState extendBasic(ClusterState currentState, LicensesMetadata currentLicenseMetadata) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java index 398b39b12aa19..e5fbc9e07955c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -18,7 +18,9 @@ public class XPackUsageRequest 
extends MasterNodeRequest<XPackUsageRequest> { - public XPackUsageRequest() {} + public XPackUsageRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public XPackUsageRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java index f32fd515e7817..d1d04088dcdd7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java @@ -27,6 +27,7 @@ public class FreezeRequest extends AcknowledgedRequest<FreezeRequest> implements private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT; public FreezeRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java index e96c6a7632ec1..ea4e53aced5fe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java @@ -14,7 +14,9 @@ public class GetLicenseRequest extends MasterNodeReadRequest<GetLicenseRequest> { - public GetLicenseRequest() {} + public GetLicenseRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetLicenseRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java index e6b087c97cdb5..6584dcc279e85 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java @@ -53,6 +53,7 @@ public static MigrateToDataTiersRequest parse(XContentParser parser) throws IOEx } public MigrateToDataTiersRequest(@Nullable String legacyTemplateToDelete, @Nullable String nodeAttributeName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.legacyTemplateToDelete = legacyTemplateToDelete; this.nodeAttributeName = nodeAttributeName; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java index 3d46b2dd5070f..6270c27ac463f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java @@ -44,6 +44,7 @@ public static SetResetModeActionRequest disabled(boolean deleteMetadata) { } SetResetModeActionRequest(boolean enabled, Boolean deleteMetadata) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.enabled = enabled; this.deleteMetadata = deleteMetadata != null && deleteMetadata; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java index 9a623ede96f02..3581b9db19887 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java @@ -34,7 +34,7 @@ public GetAsyncStatusRequest(String id) { public GetAsyncStatusRequest(StreamInput in) throws IOException { super(in); this.id = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.keepAlive = in.readTimeValue(); } } @@ -43,7 +43,7 @@ public GetAsyncStatusRequest(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); - if (out.getTransportVersion().onOrAfter(TransportVersions.ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeTimeValue(keepAlive); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java index 300d2844b7a2a..df917b4e97b7d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java @@ -34,6 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean active; public Request(final String name, final boolean active) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.active = active; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index b12f7bf2dc06a..b187e5e39dd33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -45,7 +45,9 @@ public Request(StreamInput in) throws IOException { } } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java index 8e7e9f8605245..e38a1cfd4a2cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java @@ -32,6 +32,7 @@ public static class Request extends AcknowledgedRequest { private final String name; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java index c405e4e81ff19..d979a4cf44b9b 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java @@ -41,7 +41,9 @@ public static class Request extends MasterNodeReadRequest { private String[] followerIndices; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public String[] getFollowerIndices() { return followerIndices; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java index 70f4f256c87e2..bd6ab5bb5af44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -34,7 +34,9 @@ public static class Request extends MasterNodeReadRequest { private String name; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java index 7ad8e5881e443..c6905b2d06a34 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java @@ -31,6 +31,7 @@ public static class Request extends MasterNodeRequest { private final String followIndex; public Request(String followIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.followIndex = Objects.requireNonNull(followIndex, "followIndex"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 92902aa9962ab..333171d864c4f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -85,7 +85,9 @@ public static Request fromXContent(XContentParser parser, String name) throws IO private FollowParameters parameters = new FollowParameters(); private List leaderIndexExclusionPatterns = Collections.emptyList(); - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 6570fb66a2755..db1e84aca9cda 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -85,7 +85,9 @@ public static Request fromXContent(final XContentParser parser) throws IOExcepti private FollowParameters parameters = new FollowParameters(); private ActiveShardCount waitForActiveShards = ActiveShardCount.NONE; - public 
Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getFollowerIndex() { return followerIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 4cd84733b19e0..12ddea8d99578 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -54,7 +54,9 @@ public static Request fromXContent(final XContentParser parser, final String fol private String followerIndex; private FollowParameters parameters = new FollowParameters(); - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public String getFollowerIndex() { return followerIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java index 808df5f8bccb0..9a5f011f39a1b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java @@ -34,6 +34,7 @@ public static class Request extends AcknowledgedRequest implements Indi private final String followerIndex; public Request(String followerIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.followerIndex = followerIndex; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java index e444232291101..82f98176838ee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java @@ -30,6 +30,7 @@ public static class Request extends MasterNodeRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java index 779ea535f74d9..5d629365a8096 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java @@ -34,6 +34,7 @@ public static class Request extends MasterNodeRequest { private boolean waitForCompletion; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = Objects.requireNonNull(name, "name cannot be null"); this.waitForCompletion = true; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java index ef8229b407b56..37851a3641ebd 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -39,10 +39,12 @@ public static class Request extends MasterNodeReadRequest { private final List names; public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = new ArrayList<>(); } public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Arrays.asList(names); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java index 4ebbb75239879..d1031828e0522 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -37,6 +37,7 @@ public static class Request extends MasterNodeRequest { private String policyName; public Request(String policyName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.policyName = policyName; } @@ -42,7 +43,9 @@ public Request(StreamInput in) throws IOException { policyName = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getPolicyName() { return policyName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java index 41b29365b8866..d359498f33621 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -104,6 +104,7 @@ public static class Request extends AcknowledgedRequest { private final String[] policyNames; public Request(String... 
policyNames) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); if (policyNames == null) { throw new IllegalArgumentException("ids cannot be null"); } @@ -116,6 +117,7 @@ public Request(StreamInput in) throws IOException { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); policyNames = Strings.EMPTY_ARRAY; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java index fe6754b735ef7..ebaaf42246251 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java @@ -38,6 +38,7 @@ public class PutLifecycleRequest extends AcknowledgedRequest { private final XContentType contentType; public Request(TaskType taskType, String inferenceEntityId, BytesReference content, XContentType contentType) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.taskType = taskType; this.inferenceEntityId = inferenceEntityId; this.content = content; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java index 631aa77a282ef..f82ee8b73c7a2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -27,6 +28,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.TransportVersions.ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT; +import static org.elasticsearch.TransportVersions.ML_RERANK_DOC_OPTIONAL; public class RankedDocsResults implements InferenceServiceResults { public static final String NAME = "rerank_service_results"; @@ -66,7 +68,11 @@ public static ConstructingObjectParser<RankedDoc, Void> createParser(boo * @param relevanceScore * @param text */ - public record RankedDoc(int index, float relevanceScore, String text) implements Writeable, ToXContentObject { + public record RankedDoc(int index, float relevanceScore, @Nullable String text) + implements + Comparable<RankedDoc>, + Writeable, + ToXContentObject { public static ConstructingObjectParser<RankedDoc, Void> createParser(boolean ignoreUnknownFields) { ConstructingObjectParser<RankedDoc, Void> parser = new ConstructingObjectParser<>( @@ -77,7 +83,7 @@ public static ConstructingObjectParser<RankedDoc, Void> createParser(boolean ign ); parser.declareInt(ConstructingObjectParser.constructorArg(), INDEX_FIELD); parser.declareFloat(ConstructingObjectParser.constructorArg(), RELEVANCE_SCORE_FIELD); - parser.declareString(ConstructingObjectParser.constructorArg(), TEXT_FIELD); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TEXT_FIELD); return parser; } @@ -95,7 +101,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(INDEX, index);
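// Editor's aside, not part of the upstream diff: RankedDoc now implements Comparable<RankedDoc>
// (declared above; the compareTo body appears just below) with a reversed comparison,
// Float.compare(other.relevanceScore, this.relevanceScore), so the natural order is highest
// relevance first. A minimal usage sketch, assuming java.util.ArrayList/Collections and an
// existing List<RankedDoc> named docs:
List<RankedDoc> byRelevance = new ArrayList<>(docs);
Collections.sort(byRelevance); // no explicit Comparator needed; best-scoring document comes first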
builder.field(RELEVANCE_SCORE, relevanceScore); - builder.field(TEXT, text); + if (text != null) { + builder.field(TEXT, text); + } builder.endObject(); @@ -103,7 +111,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public static RankedDoc of(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { + if (in.getTransportVersion().onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + return new RankedDoc(in.readInt(), in.readFloat(), in.readOptionalString()); + } else if (in.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { return new RankedDoc(in.readInt(), in.readFloat(), in.readString()); } else { return new RankedDoc(Integer.parseInt(in.readString()), Float.parseFloat(in.readString()), in.readString()); @@ -112,14 +122,18 @@ public static RankedDoc of(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { + if (out.getTransportVersion().onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + out.writeInt(index); + out.writeFloat(relevanceScore); + out.writeOptionalString(text); + } else if (out.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { out.writeInt(index); out.writeFloat(relevanceScore); - out.writeString(text); + out.writeString(text == null ? "" : text); } else { out.writeString(Integer.toString(index)); out.writeString(Float.toString(relevanceScore)); - out.writeString(text); + out.writeString(text == null ? "" : text); } } @@ -127,6 +141,11 @@ public Map asMap() { return Map.of(NAME, Map.of(INDEX, index, RELEVANCE_SCORE, relevanceScore, TEXT, text)); } + @Override + public int compareTo(RankedDoc other) { + return Float.compare(other.relevanceScore, this.relevanceScore); + } + public String toString() { return "RankedDoc{" + "index='" diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java index 00064138f0362..2984c203ded31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java @@ -133,10 +133,10 @@ public Request(StreamInput in) throws IOException { this.previouslyLicensed = in.readOptionalBoolean(); this.inferenceTimeout = in.readOptionalTimeValue(); this.highPriority = in.readBoolean(); - // The prefixType was added prior to TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED but we're serializing it now + // The prefixType was added prior to TransportVersions.V_8_13_0 but we're serializing it now // as a safety measure. At the time of writing this it doesn't have to be serialized because this class is only used internally // and on a single node so it never actually gets serialized. But we'll do it just in case that changes in the future. 
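// Editor's aside, not part of the upstream diff: the surrounding hunks replace feature-named
// transport-version constants (e.g. ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED) with the released
// constant TransportVersions.V_8_13_0 now that 8.13 has shipped. A hedged sketch of the
// read/write gating pattern these hunks use (hypothetical request class; the field names are
// taken from the CoordinatedInferenceAction change above for illustration):
@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    out.writeBoolean(highPriority);
    if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
        out.writeEnum(prefixType); // peers on earlier versions do not read this field, so skip it
    }
}
// The StreamInput constructor mirrors this with in.getTransportVersion().onOrAfter(...), falling
// back to a default (PrefixType.NONE above) when reading from an older node.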
- if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.prefixType = in.readEnum(TrainedModelPrefixStrings.PrefixType.class); } } @@ -209,7 +209,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalBoolean(previouslyLicensed); out.writeOptionalTimeValue(inferenceTimeout); out.writeBoolean(highPriority); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(prefixType); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java index 23fed34d6889e..9b383b2652af4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java @@ -36,6 +36,7 @@ public static class Request extends MasterNodeRequest { private final StartTrainedModelDeploymentAction.TaskParams taskParams; public Request(StartTrainedModelDeploymentAction.TaskParams taskParams) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskParams = ExceptionsHelper.requireNonNull(taskParams, "taskParams"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index 5c5e02559b1d5..40560f11b5039 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -36,6 +36,7 @@ public Request(StreamInput in) throws IOException { } public Request(String calendarId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index 7d37dc8716387..efd35a3ba87f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -38,6 +38,7 @@ public Request(StreamInput in) throws IOException { } public Request(String calendarId, String eventId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); this.eventId = ExceptionsHelper.requireNonNull(eventId, ScheduledEvent.EVENT_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index 48323692b7915..82d6c36273539 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -48,6 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); ackTimeout(DEFAULT_TIMEOUT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 2681fadf8fc59..f25be9cd164a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -37,6 +37,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private boolean force; public Request(String datafeedId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java index 50cec50b2e255..782c7fa4a4db1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java @@ -38,6 +38,7 @@ public Request(StreamInput in) throws IOException { } public Request(String filterId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.filterId = ExceptionsHelper.requireNonNull(filterId, FILTER_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java index f3e888ef9599c..5bf6a8e38e18d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java @@ -40,6 +40,7 @@ public Request(StreamInput in) throws IOException { } public Request(String jobId, String forecastId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.forecastId = ExceptionsHelper.requireNonNull(forecastId, ForecastRequestStats.FORECAST_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 58b67e57acf26..99b045d19bdd0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -44,6 +44,7 @@ public static class Request extends AcknowledgedRequest { private boolean deleteUserAnnotations; public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java index 9cd19eab449a3..d76c4e2db064a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java @@ -48,6 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request(String id) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.id = ExceptionsHelper.requireNonNull(id, TrainedModelConfig.MODEL_ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java index 507060b1e51a4..27e895df5d415 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest { private final String modelId; public Request(String modelAlias, String modelId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java index 04f1b3ddb2e26..9254d9ecc1425 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java @@ -30,6 +30,7 @@ public static class Request extends MasterNodeRequest { private final String modelId; public Request(String modelId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, "model_id"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java index 64b042b61c2b6..305ed8c4fc607 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java @@ -60,6 +60,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index b270c4506ba4a..8fb1f3a91ab8b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -29,6 +29,7 @@ public static class Request extends 
MasterNodeRequest { private String[] jobIds; public Request(String[] jobIds) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobIds = jobIds; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java index bdba626676b2d..c24fc159769e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java @@ -27,11 +27,11 @@ private FlushTrainedModelCacheAction() { public static class Request extends AcknowledgedRequest { public Request() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); } Request(TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); } public Request(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 1bd266c68a65a..e509b84b06ae1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -50,6 +50,7 @@ public Request(String datafeedId) { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); local(true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 1a63eda0d687d..fafb9afa99f85 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -70,6 +70,7 @@ public static class Request extends MasterNodeReadRequest { private boolean allowNoMatch = true; public Request(String datafeedId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java index e5542593df4e4..ec49603c89cb8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java @@ -61,6 +61,7 @@ public static class Request extends MasterNodeReadRequest { + public static class Request extends MasterNodeRequest { - public Request(TimeValue timeout) { - super(timeout); + private final TimeValue requestTimeout; + + public Request(TimeValue masterNodeTimeout, TimeValue requestTimeout) { + super(masterNodeTimeout); + this.requestTimeout = Objects.requireNonNull(requestTimeout); } public Request(StreamInput in) throws IOException { super(in); + this.requestTimeout = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
super.writeTo(out); + out.writeTimeValue(this.requestTimeout); + } + + public TimeValue requestTimeout() { + return requestTimeout; } @Override @@ -50,9 +65,14 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, "get_ml_autoscaling_resources", parentTaskId, headers); } + @Override + public ActionRequestValidationException validate() { + return null; + } + @Override public int hashCode() { - return Objects.hash(ackTimeout()); + return Objects.hash(requestTimeout); } @Override @@ -64,7 +84,7 @@ public boolean equals(Object obj) { return false; } GetMlAutoscalingStats.Request other = (GetMlAutoscalingStats.Request) obj; - return Objects.equals(ackTimeout(), other.ackTimeout()); + return Objects.equals(requestTimeout, other.requestTimeout); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java index 0be6e152d907e..eb41ff4ce870d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java @@ -138,7 +138,7 @@ public Request(StreamInput in) throws IOException { } else { prefixType = TrainedModelPrefixStrings.PrefixType.NONE; } - if (in.getTransportVersion().onOrAfter(TransportVersions.NLP_DOCUMENT_CHUNKING_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { chunkResults = in.readBoolean(); } else { chunkResults = false; @@ -232,7 +232,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeEnum(prefixType); } - if (out.getTransportVersion().onOrAfter(TransportVersions.NLP_DOCUMENT_CHUNKING_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(chunkResults); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java index e8b345b3c3ff6..4664dbe8f7bc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java @@ -68,6 +68,7 @@ public static class Request extends AcknowledgedRequest { private final String nodeId; public Request(String nodeId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "nodeId"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index b6f852605db9f..cf17a828930c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -55,10 +55,12 @@ public static Request parseRequest(String jobId, XContentParser parser) { private JobParams jobParams; public Request(JobParams jobParams) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobParams = Objects.requireNonNull(jobParams); } public Request(String jobId) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobParams = new JobParams(jobId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index fe26cdb0377fd..82db002e42043 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -62,6 +62,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 12e9b4f2967d0..f79d2af49f536 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -43,6 +43,7 @@ public static Request parseRequest(String datafeedId, IndicesOptions indicesOpti private final DatafeedConfig datafeed; public Request(DatafeedConfig datafeed) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.datafeed = datafeed; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index 9d8fca699df2d..60d7f0008c0de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -51,6 +51,7 @@ public static Request parseRequest(String jobId, XContentParser parser, IndicesO public Request(Job.Builder jobBuilder) { // Validate the jobBuilder immediately so that errors can be detected prior to transportation. + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); jobBuilder.validateInputFields(); // Validate that detector configs are unique. 
// This validation logically belongs to validateInputFields call but we perform it only for PUT action to avoid BWC issues which diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java index 2e5a475369510..25d32d19aef8d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java @@ -75,6 +75,7 @@ public Request(TrainedModelConfig config, boolean deferDefinitionDecompression) } public Request(TrainedModelConfig config, boolean deferDefinitionDecompression, boolean waitForCompletion) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; this.deferDefinitionDecompression = deferDefinitionDecompression; this.waitForCompletion = waitForCompletion; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java index 9f0b5880f5c51..3ba91390f10d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java @@ -48,6 +48,7 @@ public static class Request extends AcknowledgedRequest { private final boolean reassign; public Request(String modelAlias, String modelId, boolean reassign) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.reassign = reassign; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java index b7fcb98426cc0..a588f74426993 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java @@ -76,6 +76,7 @@ public Request( int totalParts, boolean allowOverwriting ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.definition = ExceptionsHelper.requireNonNull(definition, DEFINITION); this.part = part; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java index 1abae7be95011..106f37a378897 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java @@ -70,6 +70,7 @@ public Request( @Nullable List scores, boolean allowOverwriting ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); 
this.vocabulary = ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY); this.merges = Optional.ofNullable(merges).orElse(List.of()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java index bc74f16eea0e5..548fd80da73de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java @@ -57,6 +57,7 @@ public static class Request extends AcknowledgedRequest { private boolean deleteUserAnnotations; public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index eb975133e71eb..0dd6fd8b59669 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -63,7 +63,9 @@ public static Request parseRequest(String jobId, String snapshotId, XContentPars private boolean deleteInterveningResults; private boolean force; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -74,6 +76,7 @@ public Request(StreamInput in) throws IOException { } public Request(String jobId, String snapshotId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java index 9a1574bd2b036..821caf001f3e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java @@ -43,6 +43,7 @@ public static class Request extends AcknowledgedRequest implements ToXC } public Request(boolean enabled) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.enabled = enabled; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java index 67abda2b3eb64..00e6a546be5a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java @@ -72,6 +72,7 @@ public static Request parseRequest(String id, XContentParser parser) { private TimeValue timeout = DEFAULT_TIMEOUT; public Request(String id) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setId(id); } @@ -81,7 +82,9 @@ public Request(StreamInput in) throws IOException { timeout = 
in.readTimeValue(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public final void setId(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameAnalyticsConfig.ID); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 18763a78fa456..deeed6df87064 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -66,14 +66,17 @@ public static Request parseRequest(String datafeedId, XContentParser parser) { private DatafeedParams params; public Request(String datafeedId, long startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = new DatafeedParams(datafeedId, startTime); } public Request(String datafeedId, String startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = new DatafeedParams(datafeedId, startTime); } public Request(DatafeedParams params) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = params; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 8d9da97538e11..b3cf9f16c3c82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -140,9 +140,12 @@ public static Request parseRequest(String modelId, String deploymentId, XContent private int queueCapacity = 1024; private Priority priority = Priority.NORMAL; - private Request() {} + private Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String modelId, String deploymentId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setModelId(modelId); setDeploymentId(deploymentId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java index d23f222b9687b..513a4d7b2ea8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java @@ -57,6 +57,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfigUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.update = update; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java index 694ca39d9cd49..0757f1f1dc7e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java @@ -43,6 +43,7 @@ public static Request parseRequest(String datafeedId, @Nullable IndicesOptions i private DatafeedUpdate update; 
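The hunks above and below all make the same mechanical change: ML request constructors that previously fell back to an implicit default now pass their master-node timeout (and, for acknowledged requests, the ack timeout) to the superclass explicitly. Below is a minimal sketch of the pattern, assuming only the `AcknowledgedRequest` base class and the constants already referenced in the diff; the `ExampleRequest` class itself is invented for illustration and is not part of the patch.

```java
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.core.TimeValue;

// Sketch of the migration pattern: every constructor names its timeouts.
public class ExampleRequest extends AcknowledgedRequest<ExampleRequest> {

    public ExampleRequest() {
        // Marks this call site as still relying on the legacy implicit 30s
        // default; the constant is inherited from MasterNodeRequest.
        super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
    }

    public ExampleRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) {
        // Preferred shape: the caller supplies both timeouts explicitly.
        super(masterNodeTimeout, ackTimeout);
    }

    @Override
    public ActionRequestValidationException validate() {
        return null; // nothing to validate in this sketch
    }
}
```

The deliberately alarming constant name keeps the remaining implicit-default call sites easy to grep for when they are later migrated to caller-supplied timeouts.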
public Request(DatafeedUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.update = update; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 15cd272d12b8b..33856bfcefbb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -46,6 +46,7 @@ public Request(String jobId, JobUpdate update) { } private Request(String jobId, JobUpdate update, boolean isInternal) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = jobId; this.update = update; this.isInternal = isInternal; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java index 5cd55a201c45d..fd1b179da8919 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java @@ -33,6 +33,7 @@ public static class Request extends MasterNodeRequest { private final RoutingInfoUpdate update; public Request(String nodeId, String deploymentId, RoutingInfoUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "node_id"); this.deploymentId = ExceptionsHelper.requireNonNull(deploymentId, "deployment_id"); this.update = ExceptionsHelper.requireNonNull(update, "update"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index bb113a9b3e1e8..62a7d84c60a62 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -64,9 +64,12 @@ public static Request parseRequest(String deploymentId, XContentParser parser) { private String deploymentId; private int numberOfAllocations; - private Request() {} + private Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(String deploymentId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); setDeploymentId(deploymentId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java index 7fbcffa476159..abe481c926fdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java @@ -71,6 +71,7 @@ public static UpgradeJobModelSnapshotAction.Request parseRequest(XContentParser } public Request(String jobId, String snapshotId, TimeValue timeValue, boolean waitForCompletion) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID); this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID); this.timeout = timeValue == null ? DEFAULT_TIMEOUT : timeValue; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java index b8b75e2bf7eb4..412ccfa7b24a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java @@ -58,6 +58,10 @@ public String getResultsField() { return resultsField; } + public double score() { + return score; + } + @Override public Double predictedValue() { return score; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java index 2ddbf8bd63f49..4e914cba1ff0d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java @@ -69,6 +69,13 @@ public static TextSimilarityConfigUpdate fromMap(Map map) { private final String resultsField; private final TextSimilarityConfig.SpanScoreFunction spanScoreFunction; + public TextSimilarityConfigUpdate(String text) { + super((TokenizationUpdate) null); + this.text = ExceptionsHelper.requireNonNull(text, TEXT); + this.resultsField = null; + this.spanScoreFunction = null; + } + public TextSimilarityConfigUpdate( String text, @Nullable String resultsField, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java index 8fcc977e3faeb..ea67dfdfb1857 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java @@ -37,10 +37,12 @@ public static class Request extends MasterNodeRequest { - public MonitoringMigrateAlertsRequest() {} + public MonitoringMigrateAlertsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public MonitoringMigrateAlertsRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index 06a6b4c2a072c..7f1e81164a513 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -38,6 +38,7 @@ public static class Request extends AcknowledgedRequest implements Indi private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); public 
Request(RollupJobConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } @@ -48,6 +49,7 @@ public Request(StreamInput in) throws IOException { public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); } public static Request fromXContent(final XContentParser parser, final String id) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java index 3cb7b5b07fc1b..fba742e288032 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java @@ -101,6 +101,7 @@ public MountSearchableSnapshotRequest( boolean waitForCompletion, Storage storage ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.mountedIndexName = Objects.requireNonNull(mountedIndexName); this.repositoryName = Objects.requireNonNull(repositoryName); this.snapshotName = Objects.requireNonNull(snapshotName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java index 039ed8aa5fb64..f85ca260c3fff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java @@ -166,16 +166,4 @@ public void writeTo(StreamOutput out) throws IOException { public ExpressionRoleMapping getMapping() { return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled); } - - public static PutRoleMappingRequest fromMapping(ExpressionRoleMapping mapping) { - var request = new PutRoleMappingRequest(); - request.setName(mapping.getName()); - request.setEnabled(mapping.isEnabled()); - request.setRoles(mapping.getRoles()); - request.setRoleTemplates(mapping.getRoleTemplates()); - request.setRules(mapping.getExpression()); - request.setMetadata(mapping.getMetadata()); - - return request; - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java index 88a930063190b..d46c21f080308 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -9,8 +9,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; 
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; @@ -35,8 +34,8 @@ public PutRoleMappingRequestBuilder(ElasticsearchClient client) { /** * Populate the put role request from the source and the role's name */ - public PutRoleMappingRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, source, xContentType); + public PutRoleMappingRequestBuilder source(String name, XContentParser parser) throws IOException { + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); request.setName(name); request.setEnabled(mapping.isEnabled()); request.setRoles(mapping.getRoles()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java index bc8d81cd268ad..7623a7f65af34 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java @@ -34,9 +34,13 @@ public GetSecuritySettingsAction() { public static class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } - public Request(StreamInput in) throws IOException {} + public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public void writeTo(StreamOutput out) throws IOException {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 20feb0faf5033..3cce133749e44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -72,12 +72,14 @@ public Request( Map tokensIndexSettings, Map profilesIndexSettings ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.mainIndexSettings = Objects.requireNonNullElse(mainIndexSettings, Collections.emptyMap()); this.tokensIndexSettings = Objects.requireNonNullElse(tokensIndexSettings, Collections.emptyMap()); this.profilesIndexSettings = Objects.requireNonNullElse(profilesIndexSettings, Collections.emptyMap()); } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.mainIndexSettings = in.readGenericMap(); this.tokensIndexSettings = in.readGenericMap(); this.profilesIndexSettings = in.readGenericMap(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 372b62cffeaea..7f927d45a2375 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.slm.action.GetSLMStatusAction; import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; import java.util.Collection; @@ -165,7 +166,11 @@ public class ClusterPrivilegeResolver { ILMActions.STOP.name(), GetStatusAction.NAME ); - private static final Set READ_SLM_PATTERN = Set.of(GetSnapshotLifecycleAction.NAME, GetStatusAction.NAME); + private static final Set READ_SLM_PATTERN = Set.of( + GetSLMStatusAction.NAME, + GetSnapshotLifecycleAction.NAME, + GetStatusAction.NAME + ); private static final Set MANAGE_SEARCH_APPLICATION_PATTERN = Set.of("cluster:admin/xpack/application/search_application/*"); private static final Set MANAGE_SEARCH_QUERY_RULES_PATTERN = Set.of("cluster:admin/xpack/query_rules/*"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 49be4c5d466b2..eb4b7efdb88b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -49,7 +49,11 @@ static RoleDescriptor kibanaAdminUser(String name, Map metadata) null, null, metadata, - null + null, + null, + null, + null, + "Grants access to all features in Kibana." ); } @@ -408,7 +412,13 @@ static RoleDescriptor kibanaSystem(String name) { getRemoteIndicesReadPrivileges("traces-apm-*") }, null, null, - null + "Grants access necessary for the Kibana system user to read from and write to the Kibana indices, " + + "manage index templates and tokens, and check the availability of the Elasticsearch cluster. " + + "It also permits activating, searching, and retrieving user profiles, " + + "as well as updating user profile data for the kibana-* namespace. " + + "Additionally, this role grants read access to the .monitoring-* indices " + + "and read and write access to the .reporting-* indices. " + + "Note: This role should not be assigned to users as the granted permissions may change between releases." ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index dd8f34a60fa1f..2e7a5271103f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -103,7 +103,11 @@ public class ReservedRolesStore implements BiConsumer, ActionListene ) ), null, - null + "Grants full access to cluster management and data indices. " + + "This role also grants direct read-only access to restricted indices like .security. " + + "A user with this role can impersonate any other user in the system, " + + "manage security and create roles with unlimited privileges. 
" + + "Take extra care when assigning it to a user." ); private static final Map ALL_RESERVED_ROLES = initializeReservedRoles(); @@ -203,7 +207,12 @@ private static Map initializeReservedRoles() { getRemoteIndicesReadPrivileges("metricbeat-*") }, null, null, - null + "Grants the minimum privileges required for any user of X-Pack monitoring other than those required to use Kibana. " + + "This role grants access to the monitoring indices and grants privileges necessary " + + "for reading basic cluster information. " + + "This role also includes all Kibana privileges for the Elastic Stack monitoring features. " + + "Monitoring users should also be assigned the kibana_admin role, " + + "or another role with access to the Kibana instance." ) ), entry( @@ -232,7 +241,16 @@ private static Map initializeReservedRoles() { ) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants the minimum privileges required to write data into the monitoring indices (.monitoring-*). " + + "This role also has the privileges necessary to create Metricbeat indices (metricbeat-*) " + + "and write data into them." ) ), entry( @@ -251,7 +269,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the minimum privileges required to collect monitoring data for the Elastic Stack." ) ), entry( @@ -261,7 +283,14 @@ private static Map initializeReservedRoles() { new String[] { "manage_index_templates", "manage_pipeline" }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to manage all index templates and all ingest pipeline configurations." ) ), // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role @@ -275,7 +304,14 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use Kibana feature privileges instead"), - null + null, + null, + null, + null, + "Grants the specific privileges required for users of X-Pack reporting other than those required to use Kibana. " + + "This role grants access to the reporting indices; each user has access to only their own reports. " + + "Reporting users should also be assigned additional roles that grant access to Kibana as well as read access " + + "to the indices that will be used to generate reports." ) ), entry(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME)), @@ -286,7 +322,15 @@ private static Map initializeReservedRoles() { new String[] { "monitor", MonitoringBulkAction.NAME }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the Logstash system user to send system-level data (such as monitoring) to Elasticsearch. " + + "This role should not be assigned to users as the granted permissions may change between releases." 
) ), entry( @@ -297,7 +341,14 @@ private static Map initializeReservedRoles() { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".management-beats").privileges("all").build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to the .management-beats index, which contains configuration information for the Beats." ) ), entry( @@ -311,7 +362,15 @@ private static Map initializeReservedRoles() { .privileges("create_index", "create") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the Beats system user to send system-level data (such as monitoring) to Elasticsearch. " + + "This role should not be assigned to users as the granted permissions may change between releases." ) ), entry( @@ -325,7 +384,14 @@ private static Map initializeReservedRoles() { .privileges("create_index", "create_doc") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the APM system user to send system-level data (such as monitoring) to Elasticsearch.\n" ) ), entry( @@ -381,7 +447,12 @@ private static Map initializeReservedRoles() { MetadataUtils.getDeprecatedReservedMetadata( "This role will be removed in a future major release. Please use editor and viewer roles instead" ), - null + null, + null, + null, + null, + "Grants the privileges required for APM users (such as read and view_index_metadata privileges " + + "on the apm-* and .ml-anomalies* indices)." ) ), entry( @@ -394,7 +465,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants access necessary to manage inference models and to perform inference." ) ), entry( @@ -407,7 +482,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants access necessary to perform inference." ) ), entry( @@ -440,7 +519,15 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the minimum privileges required to view machine learning configuration, status, and work with results. " + + "This role grants monitor_ml cluster privileges, read access to the .ml-notifications and .ml-anomalies* indices " + + "(which store machine learning results), and write access to .ml-annotations* indices. " + + "Machine learning users also need index privileges for source and destination indices " + + "and roles that grant access to Kibana. " ) ), entry( @@ -474,7 +561,15 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Provides all of the privileges of the machine_learning_user role plus the full use of the machine learning APIs. " + + "Grants manage_ml cluster privileges, read access to .ml-anomalies*, .ml-notifications*, .ml-state*, " + + ".ml-meta* indices and write access to .ml-annotations* indices. " + + "Machine learning administrators also need index privileges for source and destination indices " + + "and roles that grant access to Kibana."
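The `ReservedRolesStore` hunks in this stretch all follow one template: each reserved role gains a trailing human-readable description argument, with `null` filled in for the intervening optional parameters. Below is a sketch of the resulting call shape, mirroring the twelve positional arguments of the expanded `rollup_user` entry that follows; the role name, privileges, and description text here are invented for illustration, and the argument meanings in the comments are inferred from the diff rather than quoted from the API docs.

```java
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;

// Sketch of a reserved role built with the expanded constructor shape.
final class ExampleReservedRole {
    static final RoleDescriptor EXAMPLE = new RoleDescriptor(
        "example_user",                          // role name
        new String[] { "monitor" },              // cluster privileges
        null,                                    // indices privileges
        null,                                    // application privileges
        null,                                    // configurable cluster privileges
        null,                                    // run-as principals
        MetadataUtils.DEFAULT_RESERVED_METADATA, // marks the role as reserved
        null,                                    // transient metadata
        null,                                    // remote indices privileges
        null,                                    // remote cluster permissions
        null,                                    // restriction
        "Human-readable description surfaced alongside the role definition."
    );
}
```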
) ), // DEPRECATED: to be removed in 9.0.0 @@ -501,7 +596,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_admin] role instead"), - null + null, + null, + null, + null, + "Grants manage_data_frame_transforms cluster privileges, which enable you to manage transforms. " + + "This role also includes all Kibana privileges for the machine learning features." ) ), // DEPRECATED: to be removed in 9.0.0 @@ -528,7 +628,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_user] role instead"), - null + null, + null, + null, + null, + "Grants monitor_data_frame_transforms cluster privileges, which enable you to use transforms. " + + "This role also includes all Kibana privileges for the machine learning features. " ) ), entry( @@ -549,7 +654,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants manage_transform cluster privileges, which enable you to manage transforms. " + + "This role also includes all Kibana privileges for the machine learning features." ) ), entry( @@ -570,7 +680,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants monitor_transform cluster privileges, which enable you to perform read-only operations related to " + + "transforms. This role also includes all Kibana privileges for the machine learning features." ) ), entry( @@ -585,7 +700,16 @@ private static Map initializeReservedRoles() { .allowRestrictedIndices(true) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Allows users to create and execute all Watcher actions. " + + "Grants read access to the .watches index. Also grants read access " + + "to the watch history and the triggered watches index." ) ), entry( @@ -604,7 +728,14 @@ private static Map initializeReservedRoles() { .privileges("read") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants read access to the .watches index, the get watch action and the watcher stats." ) ), entry( @@ -619,16 +750,50 @@ private static Map initializeReservedRoles() { .allowRestrictedIndices(true) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to the .logstash* indices for managing configurations, " + + "and grants necessary access for logstash-specific APIs exposed by the logstash x-pack plugin." ) ), entry( "rollup_user", - new RoleDescriptor("rollup_user", new String[] { "monitor_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA) + new RoleDescriptor( + "rollup_user", + new String[] { "monitor_rollup" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants monitor_rollup cluster privileges, which enable you to perform read-only operations related to rollups." 
+ ) ), entry( "rollup_admin", - new RoleDescriptor("rollup_admin", new String[] { "manage_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA) + new RoleDescriptor( + "rollup_admin", + new String[] { "manage_rollup" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants manage_rollup cluster privileges, which enable you to manage and execute all rollup actions." + ) ), entry( "snapshot_user", @@ -645,7 +810,14 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the necessary privileges to create snapshots of all the indices and to view their metadata. " + + "This role enables users to view the configuration of existing snapshot repositories and snapshot details. " + + "It does not grant authority to remove or add repositories or to restore snapshots. " + + "It also does not grant the ability to change index settings or to read or update data stream or index data." ) ), entry( @@ -661,7 +833,14 @@ private static Map initializeReservedRoles() { .build(), RoleDescriptor.IndicesPrivileges.builder().indices(".enrich-*").privileges("manage", "write").build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to manage all enrich indices (.enrich-*) and all operations on ingest pipelines." ) ), entry("viewer", buildViewerRoleDescriptor()), @@ -703,7 +882,11 @@ private static RoleDescriptor buildViewerRoleDescriptor() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants read-only access to all features in Kibana (including Solutions) and to data indices." ); } @@ -750,7 +933,11 @@ private static RoleDescriptor buildEditorRoleDescriptor() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants full access to all features in Kibana (including Solutions) and read-only access to data indices."
); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java index 17a23f6b66b5b..6e083295b0863 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java @@ -33,9 +33,12 @@ public Request(StreamInput in) throws IOException { lifecycleId = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(String lifecycleId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = Objects.requireNonNull(lifecycleId, "id may not be null"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java index 8a8ecf3a747a8..442ff6b2bfb66 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java @@ -36,6 +36,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private String lifecycleId; public Request(String lifecycleId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = lifecycleId; } @@ -44,7 +45,9 @@ public Request(StreamInput in) throws IOException { lifecycleId = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getLifecycleId() { return this.lifecycleId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java index 9574ba7fff685..e4d698f48d252 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java @@ -26,7 +26,9 @@ protected ExecuteSnapshotRetentionAction() { public static class Request extends AcknowledgedRequest implements ToXContentObject { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java index d556c0fda5e7f..ad62b155da41c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/PutSnapshotLifecycleAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private SnapshotLifecyclePolicy lifecycle; public Request(String lifecycleId, SnapshotLifecyclePolicy lifecycle) { +
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = lifecycleId; this.lifecycle = lifecycle; } @@ -46,7 +47,9 @@ public Request(StreamInput in) throws IOException { lifecycle = new SnapshotLifecyclePolicy(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getLifecycleId() { return this.lifecycleId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java index d6deb7bda384f..666701ac1f885 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java @@ -28,7 +28,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java index 60be1b99cde8d..4aae048b5e5b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java @@ -28,7 +28,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java index 3623c659216d2..79ae38745934d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java @@ -34,7 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deleteDestIndex; public Request(String id, boolean force, boolean deleteDestIndex, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.force = force; this.deleteDestIndex = deleteDestIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 0333322d2acc5..6fe4427b1065c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -77,7 +77,7 @@ public Request(StreamInput in) throws IOException { expandedIds = in.readCollectionAsImmutableList(StreamInput::readString); pageParams = new PageParams(in); allowNoMatch = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_BASIC_STATS)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { basic = in.readBoolean(); } else { basic = false; @@ -130,7 +130,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(expandedIds); pageParams.writeTo(out); out.writeBoolean(allowNoMatch); - if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_BASIC_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(basic); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java index f06ba16d9da78..adebbba651f16 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java @@ -58,7 +58,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private final TransformConfig config; public Request(TransformConfig config, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java index 9d335b2ccdb34..496e826651572 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java @@ -57,7 +57,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deferValidation; public Request(TransformConfig config, boolean deferValidation, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; this.deferValidation = deferValidation; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java index 609dd33cbfa9e..5840e107c1d17 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java @@ -34,7 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean force; public Request(String id, boolean force, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.force = force; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java index 3ecadd1b708cc..838a0650c8afa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java @@ -39,7 +39,7 @@ public static class Request extends AcknowledgedRequest { private final Instant from; public 
Request(String id, Instant from, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.from = from; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java index 3a36d9163e0c0..cdc0a53b6f0a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java @@ -40,7 +40,7 @@ public Request(StreamInput in) throws IOException { } public Request(boolean dryRun, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.dryRun = dryRun; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java index de6435ad31dbc..55c21b91b11d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java @@ -36,7 +36,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deferValidation; public Request(TransformConfig config, boolean deferValidation, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; this.deferValidation = deferValidation; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java index 576bd220853ce..902c6db07dc89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java @@ -30,9 +30,13 @@ public GetWatcherSettingsAction() { public static class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } - public Request(StreamInput in) throws IOException {} + public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public void writeTo(StreamOutput out) throws IOException {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index 29f4db51e146e..b6d999ebbf380 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -39,10 +39,12 @@ public static class Request extends AcknowledgedRequest { private final Map settings; public Request(Map settings) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.settings = settings; } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.settings = in.readGenericMap(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java index 93cc7a18594d6..449179e4f18f6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java @@ -29,7 +29,9 @@ public WatcherServiceRequest(StreamInput in) throws IOException { command = Command.valueOf(in.readString().toUpperCase(Locale.ROOT)); } - public WatcherServiceRequest() {} + public WatcherServiceRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Starts the watcher service if not already started. diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index cef2d710237cf..476167c5db0fb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -192,7 +192,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + } else if (version.before(TransportVersions.V_8_13_0)) { return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), @@ -202,7 +202,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED) + } else if (version.before(TransportVersions.V_8_13_0) && (instance.getInputType() == InputType.UNSPECIFIED || instance.getInputType() == InputType.CLASSIFICATION || instance.getInputType() == InputType.CLUSTERING)) { @@ -215,7 +215,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED) + } else if (version.before(TransportVersions.V_8_13_0) && (instance.getInputType() == InputType.CLUSTERING || instance.getInputType() == InputType.CLASSIFICATION)) { return new InferenceAction.Request( instance.getTaskType(), @@ -262,138 +262,10 @@ public void testWriteTo_WhenVersionIsOnAfterUnspecifiedAdded() throws IOExceptio InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED + TransportVersions.V_8_13_0 ); } - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest() throws IOException { - assertBwcSerialization( - new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - 
"model", - null, - List.of(), - Map.of(), - InputType.UNSPECIFIED, - InferenceAction.Request.DEFAULT_TIMEOUT - ), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_ManualCheck() throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.UNSPECIFIED, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClustering_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLUSTERING, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClassification_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLASSIFICATION, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public - void - testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClassification_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLASSIFICATION, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); - } - - public - void - testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClustering_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLUSTERING, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); - } - public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUnspecified() throws IOException { var instance = new InferenceAction.Request( TaskType.TEXT_EMBEDDING, @@ -409,44 +281,21 @@ public void 
testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUn instance, getNamedWriteableRegistry(), instanceReader(), - TransportVersions.HOT_THREADS_AS_BYTES + TransportVersions.V_8_12_1 ); assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsUnspecified_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); + assertThat(getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.V_8_12_1), is(InputType.INGEST)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsClassification_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); + assertThat(getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.V_8_12_1), is(InputType.INGEST)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsClustering_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); - } - - public void testGetInputTypeToWrite_ReturnsUnspecified_WhenInputTypeIsClassification_VersionBeforeClusteringClassIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED), - is(InputType.UNSPECIFIED) - ); - } - - public void testGetInputTypeToWrite_ReturnsUnspecified_WhenInputTypeIsClustering_VersionBeforeClusteringClassIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED), - is(InputType.UNSPECIFIED) - ); + assertThat(getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.V_8_12_1), is(InputType.INGEST)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java index 3be073b439828..603531f0aedf9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java @@ -16,6 +16,8 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.TransportVersions.ML_RERANK_DOC_OPTIONAL; + public class RankedDocsResultsTests extends AbstractBWCSerializationTestCase { @Override @@ -33,7 +35,7 @@ public static RankedDocsResults createRandom() { } public static RankedDocsResults.RankedDoc createRandomDoc() { - return new RankedDocsResults.RankedDoc(randomIntBetween(0, 100), randomFloat(), randomAlphaOfLength(10)); + return new RankedDocsResults.RankedDoc(randomIntBetween(0, 100), randomFloat(), randomBoolean() ? 
null : randomAlphaOfLength(10)); } @Override @@ -45,7 +47,24 @@ protected RankedDocsResults mutateInstance(RankedDocsResults instance) throws IO @Override protected RankedDocsResults mutateInstanceForVersion(RankedDocsResults instance, TransportVersion fromVersion) { - return instance; + if (fromVersion.onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + return instance; + } else { + var compatibleDocs = rankedDocsNullStringToEmpty(instance.getRankedDocs()); + return new RankedDocsResults(compatibleDocs); + } + } + + private List rankedDocsNullStringToEmpty(List rankedDocs) { + var result = new ArrayList(rankedDocs.size()); + for (var doc : rankedDocs) { + if (doc.text() == null) { + result.add(new RankedDocsResults.RankedDoc(doc.index(), doc.relevanceScore(), "")); + } else { + result.add(doc); + } + } + return result; } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index 9c435bd37b2cb..3ab5851815474 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; @@ -22,23 +23,21 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED; -import static org.elasticsearch.TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED; import static org.hamcrest.Matchers.is; public class CoordinatedInferenceActionRequestTests extends AbstractBWCWireSerializationTestCase { public void testSerializesPrefixType_WhenTransportVersionIs_InputTypeAdded() throws IOException { var instance = createTestInstance(); instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); - var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); - assertOnBWCObject(copy, instance, ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), TransportVersions.V_8_13_0); + assertOnBWCObject(copy, instance, TransportVersions.V_8_13_0); assertThat(copy.getPrefixType(), is(TrainedModelPrefixStrings.PrefixType.INGEST)); } public void testSerializesPrefixType_DoesNotSerialize_WhenTransportVersion_IsPriorToInputTypeAdded() throws IOException { var instance = createTestInstance(); instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); - var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), UPDATE_API_KEY_EXPIRATION_TIME_ADDED); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), TransportVersions.V_8_12_1); assertNotSame(copy, instance); assertNotEquals(copy, instance); @@ -117,7 +116,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( CoordinatedInferenceAction.Request instance, TransportVersion version ) { - if (version.before(ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if 
(version.before(TransportVersions.V_8_13_0)) { instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java index ee265538829d3..eb0b8420625ac 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java @@ -23,11 +23,14 @@ protected Writeable.Reader instanceReader() { @Override protected Request createTestInstance() { - return new Request(randomTimeValue(0, 10_000)); + return new Request(TimeValue.THIRTY_SECONDS, randomTimeValue(0, 10_000)); } @Override protected Request mutateInstance(Request instance) throws IOException { - return new Request(TimeValue.timeValueMillis(instance.ackTimeout().millis() + randomIntBetween(1, 1000))); + return new Request( + TimeValue.THIRTY_SECONDS, + TimeValue.timeValueMillis(instance.requestTimeout().millis() + randomIntBetween(1, 1000)) + ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java index 6ba7dc6ac24cd..9d3c4d684e194 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -21,6 +20,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -297,7 +297,7 @@ public void testAddDocMappingIfMissing() { {"_doc":{"properties":{"some-field":{"type":"long"}}}}""", client, clusterState, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, ActionTestUtils.assertNoFailureListener(Assert::assertTrue), 1 ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index f9fdc0c8362e5..f72ca14c37e14 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; 
-import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.AdminClient; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ClusterAdminClient; @@ -371,7 +370,7 @@ private void createIndexAndAliasIfNecessary(ClusterState clusterState) { TestIndexNameExpressionResolver.newInstance(), TEST_INDEX_PREFIX, TEST_INDEX_ALIAS, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, listener ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index d15fb9a1409dd..ad73944f4c64d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -460,7 +460,12 @@ public void testSlmPrivileges() { } { - verifyClusterActionAllowed(ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/get", "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionAllowed( + ClusterPrivilegeResolver.READ_SLM, + "cluster:admin/slm/get", + "cluster:admin/slm/status", + "cluster:admin/ilm/operation_mode/get" + ); verifyClusterActionDenied( ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/delete", diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json new file mode 100644 index 0000000000000..933d7681c92e8 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json @@ -0,0 +1,14 @@ +{ + "template": { + "settings": { + "number_of_shards": 1, + "auto_expand_replicas": "0-1" + } + }, + "_meta": { + "description": "default kibana reporting settings installed by elasticsearch", + "managed": true + }, + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index 9c4da646c3399..240ad36199fe3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -5,14 +5,10 @@ "hidden": true }, "allow_auto_create": true, - "composed_of": ["kibana-reporting@custom"], + "composed_of": ["kibana-reporting@settings", "kibana-reporting@custom"], "ignore_missing_component_templates": ["kibana-reporting@custom"], "template": { "lifecycle": {}, - "settings": { - "number_of_shards": 1, - "auto_expand_replicas": "0-1" - }, "mappings": { "properties": { "meta": { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 13ef198863284..3376073bded02 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -331,6 +331,7 @@ public static class Request extends MasterNodeReadRequest implements In private String[] indices; 
public Request(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.indices = indices; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index 43601ab1b2943..ac5c5761efe13 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -44,6 +44,7 @@ public Request(StreamInput in) throws IOException { } public Request(String collectionName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.collectionName = collectionName; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index f9eeb2cca6d2e..d54c119e083ed 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -41,6 +41,7 @@ public static class Request extends MasterNodeReadRequest implements To public static ParseField NAMES_FIELD = new ParseField("names"); public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "Collection names cannot be null"); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 659c58d2bd1b8..108cebae155be 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -43,6 +43,7 @@ public Request(StreamInput in) throws IOException { } public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 3cebcd75cbe7a..a91999a49c16b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class BooleanArrayVector extends AbstractVector implements BooleanVector { static final long BASE_RAM_BYTES_USED = 
RamUsageEstimator.shallowSizeOfInstance(BooleanArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final boolean[] values; @@ -89,6 +93,11 @@ public BooleanVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(boolean[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 5f6db129e73d3..9215cd0d9bbda 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -87,6 +89,11 @@ public BooleanVector filter(int... positions) { return new BooleanBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link BitArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 7218f3d2771c8..c8921a7c9f02e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface BooleanVector extends Vector permits ConstantBooleanVect @Override BooleanVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BooleanVector, and both vectors are {@link #equals(BooleanVector, BooleanVector) equal}. 
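Note on the vector-level lookup API added above: the returned iterator builds result blocks lazily, sized near targetBlockSize, and the iterator (and each emitted block) should be released by the caller. A minimal consumption sketch, assuming a BooleanVector `vector` and an IntBlock `positions` obtained elsewhere from the same BlockFactory (both hypothetical, as is the 64kb target size):

    // Hedged usage sketch for Vector#lookup; `vector` and `positions` are assumed inputs.
    try (ReleasableIterator<BooleanBlock> lookup = vector.lookup(positions, ByteSizeValue.ofKb(64))) {
        while (lookup.hasNext()) {
            try (BooleanBlock copied = lookup.next()) {    // caller owns each emitted block
                for (int p = 0; p < copied.getPositionCount(); p++) {
                    if (copied.isNull(p)) {
                        continue;                          // out-of-range positions come back null
                    }
                    boolean v = copied.getBoolean(copied.getFirstValueIndex(p));
                    // ... use v ...
                }
            }
        }
    }
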
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 013718bb42a7d..193e6ea5d8965 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -52,9 +52,8 @@ public BooleanBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BooleanLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 81f507a4fa55a..61bbfb5ebbd02 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -11,7 +11,9 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -25,7 +27,9 @@ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final BytesRefArray values; @@ -89,6 +93,11 @@ public BytesRefVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(BytesRefArray values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 49075789ed4a4..6232cbdd2717c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -21,10 +21,6 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 4f07ca2d61049..3739dccb0f956 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -11,6 +11,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -34,6 +36,9 @@ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVe @Override BytesRefVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BytesRefVector, and both vectors are {@link #equals(BytesRefVector, BytesRefVector) equal}. diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 39bd37ea9bc34..16a8fc0888096 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -63,9 +63,8 @@ public BytesRefBlock filter(int... 
positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BytesRefLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index 16d70d1a0e800..1f6786f64e0a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant boolean value. @@ -39,6 +41,28 @@ public BooleanVector filter(int... positions) { return blockFactory().newConstantBooleanVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BooleanBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBooleanBlockWith(value, positions.getPositionCount())); + } + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BOOLEAN; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index eed780a42f7ba..33967d66374c1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant BytesRef value. @@ -45,6 +47,28 @@ public BytesRefVector filter(int... 
positions) { return blockFactory().newConstantBytesRefVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BytesRefBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBytesRefBlockWith(value, positions.getPositionCount())); + } + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BYTES_REF; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index a783f0243313e..1ddf31d753d43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant double value. @@ -39,6 +41,28 @@ public DoubleVector filter(int... 
positions) { return blockFactory().newConstantDoubleVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((DoubleBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantDoubleBlockWith(value, positions.getPositionCount())); + } + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.DOUBLE; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index c6d463af7cfad..e8fb8cb39ceb4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant int value. @@ -39,6 +41,28 @@ public IntVector filter(int... positions) { return blockFactory().newConstantIntVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((IntBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantIntBlockWith(value, positions.getPositionCount())); + } + return new IntLookup(asBlock(), positions, targetBlockSize); + } + /** * The minimum value in the block. 
*/ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 0173f1c1d4d7a..b997cbbe22849 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant long value. @@ -39,6 +41,28 @@ public LongVector filter(int... positions) { return blockFactory().newConstantLongVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((LongBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantLongBlockWith(value, positions.getPositionCount())); + } + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.LONG; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 451b6cc7b655b..e7c1d342133d5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class DoubleArrayVector extends AbstractVector implements DoubleVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final double[] values; @@ -88,6 +92,11 @@ public DoubleVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(double[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index 8f6aedf31b50e..d558eabd2dd4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public DoubleVector filter(int... positions) { return new DoubleBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link DoubleArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 1d71575b33316..3d93043f93d8f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector @Override DoubleVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a DoubleVector, and both vectors are {@link #equals(DoubleVector, DoubleVector) equal}. diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index e76a4e0c5fdee..24887bebcd838 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -52,9 +52,8 @@ public DoubleBlock filter(int... 
positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new DoubleLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index a2b6697a38634..e9d9a6b3fb958 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class IntArrayVector extends AbstractVector implements IntVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final int[] values; @@ -98,6 +102,11 @@ public IntVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(int[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index fe89782bad0ec..df8298b87237e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -126,6 +128,11 @@ public IntVector filter(int... 
positions) { return new IntBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link IntArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 8f6f42b66fbe6..b1a2d1b80a410 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA @Override IntVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 70bcf6919bea6..ae28fb9f6ffa6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -52,9 +52,8 @@ public IntBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new IntLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 6eec82528c8da..5fa904dcf1acc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -23,7 +25,9 @@ final class LongArrayVector extends AbstractVector implements LongVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final long[] values; @@ -88,6 +92,11 @@ public LongVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(long[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index d30dedd4cce16..a7828788169ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public LongVector filter(int... positions) { return new LongBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link LongArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 2ebdb89a31262..e2f53d1ee07f4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface LongVector extends Vector permits ConstantLongVector, Lo @Override LongVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a LongVector, and both vectors are {@link #equals(LongVector, LongVector) equal}. 
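The ConstantXVector#lookup bodies added above (boolean, BytesRef, double, int and long flavors) all implement the same decision tree over the min/max of the requested positions. A worked restatement of the cases for, say, a constant vector of 5 positions (illustrative only; the authoritative bodies are in the hunks above):

    // positions [1, 3]  -> min >= 0 and max < 5: one constant block of 2 positions
    // positions [7, 9]  -> min 7 > 5: one ConstantNullBlock of 2 positions
    // positions [3, 7]  -> straddles the end of the vector: per-position XLookup slow path
    // positions [-1, 2] -> negative position: IllegalArgumentException
    // multivalued positions (asVector() == null) always take the XLookup slow path
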
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index b6f1e8e77505d..01921e1195f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -52,9 +52,8 @@ public LongBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new LongLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index ed7ee93c99325..9a6b701a2e4ea 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; @@ -44,6 +45,17 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R */ long MAX_LOOKUP = 100_000; + /** + * We do not track memory for pages directly (only for single blocks), + * but the page memory overhead can still be significant, especially for pages containing thousands of blocks. + * For now, we approximate this overhead, per block, using this value. + * + * The exact overhead per block would be (more correctly) {@link RamUsageEstimator#NUM_BYTES_OBJECT_REF}, + * but we approximate it with {@link RamUsageEstimator#NUM_BYTES_OBJECT_ALIGNMENT} to avoid further alignments + * to object size (at the end of the alignment, it would make no practical difference). + */ + int PAGE_MEM_OVERHEAD_PER_BLOCK = RamUsageEstimator.NUM_BYTES_OBJECT_ALIGNMENT; + /** * {@return an efficient dense single-value view of this block}. * Null, if the block is not dense single-valued. That is, if @@ -127,19 +139,19 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R * same number of {@link #getPositionCount() positions} as the {@code positions} * parameter. *

     * <p>
-    *     For example, this this block contained {@code [a, b, [b, c]]}
+    *     For example, if this block contained {@code [a, b, [b, c]]}
     *     and were called with the block {@code [0, 1, 1, [1, 2]]} then the
     *     result would be {@code [a, b, b, [b, b, c]]}.
     * </p>
     * <p>
     *     This process produces {@code count(this) * count(positions)} values per
-    *     positions which could be quite quite large. Instead of returning a single
+    *     positions which could be quite large. Instead of returning a single
     *     Block, this returns an Iterator of Blocks containing all of the promised
     *     values.
     * </p>
     * <p>
-    *     The returned {@link ReleasableIterator} may retain a reference to {@link Block}s
-    *     inside the {@link Page}. Close it to release those references.
+    *     The returned {@link ReleasableIterator} may retain a reference to the
+    *     {@code positions} parameter. Close it to release those references.
     * </p>
     * <p>
* This block is built using the same {@link BlockFactory} as was used to diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java index 4deededdf41c5..a8a6dbaf382f9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -45,6 +47,12 @@ public ConstantNullVector filter(int... positions) { throw new UnsupportedOperationException("null vector"); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + @Override public boolean getBoolean(int position) { assert false : "null vector"; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index e5a0d934aa01a..da9ca2bbae270 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -52,7 +52,7 @@ public Block filter(int... positions) { @Override public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't lookup values from DocBlock"); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 067fddd311cc7..33f5797f60df8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.IntroSorter; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.util.Objects; @@ -235,6 +237,11 @@ public DocVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException("can't lookup values from DocVector"); + } + @Override public ElementType elementType() { return ElementType.DOC; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java index a67db54b68ec9..ec0c7efa715ad 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -120,6 +122,11 @@ public BytesRefVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return bytes.elementType(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 89b39569be454..9a5688685374d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,8 +8,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; /** * A dense Vector of single values. @@ -35,6 +37,33 @@ public interface Vector extends Accountable, RefCounted, Releasable { */ Vector filter(int... positions); + /** + * Builds an Iterator of new {@link Block}s with the same {@link #elementType} + * as this {@link Vector} whose values are copied from positions in this Vector. + * It has the same number of {@link #getPositionCount() positions} as the + * {@code positions} parameter. + *

+     * <p>
+     *     For example, if this vector contained {@code [a, b, c]}
+     *     and were called with the block {@code [0, 1, 1, [1, 2]]} then the
+     *     result would be {@code [a, b, b, [b, c]]}.
+     * </p>
+     * <p>
+     *     This process produces {@code count(positions)} values per
+     *     positions which could be quite large. Instead of returning a single
+     *     Block, this returns an Iterator of Blocks containing all of the promised
+     *     values.
+     * </p>
+     * <p>
+     *     The returned {@link ReleasableIterator} may retain a reference to the
+     *     {@code positions} parameter. Close it to release those references.
+     * </p>
+     * <p>
+     *     This block is built using the same {@link BlockFactory} as was used to
+     *     build the {@code positions} parameter.
+     * </p>
+ */ + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * {@return the element type of this vector} */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 7eeb7765e3b1e..d594d32898d36 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -12,7 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -21,6 +23,8 @@ $else$ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -38,7 +42,9 @@ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; $if(BytesRef)$ private final BytesRefArray values; @@ -166,6 +172,11 @@ $endif$ } } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index d6a8723748c1f..30ef9e799cf11 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -10,8 +10,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.$Array$; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -148,6 +150,11 @@ $endif$ return new $Type$BigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + 
@Override public void closeInternal() { // The circuit breaker that tracks the values {@link $if(boolean)$Bit$else$$Type$$endif$Array} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 0d3d2293a1bb1..8397a0f5274f1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -31,10 +31,6 @@ final class $Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Blo $if(BytesRef)$ private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 37cb2d2412522..42c34128121a8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -11,6 +11,8 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant $type$ value. @@ -58,6 +60,28 @@ $endif$ return blockFactory().newConstant$Type$Vector(value, positions.length); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single(($Type$Block) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstant$Type$BlockWith(value, positions.getPositionCount())); + } + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + $if(int)$ /** * The minimum value in the block. 
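To make the new API concrete: a lookup call takes an IntBlock of positions and returns a ReleasableIterator of result blocks, each capped near targetBlockSize and built by the same BlockFactory as the positions block. A minimal consumer sketch, assuming the long specialization generated from these templates (the helper name and the summing logic are illustrative, not part of this change):

import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.core.ReleasableIterator;

final class LookupSketch {
    /** Sums the first value at each looked-up position; positions outside the vector yield null entries. */
    static long sumLookedUp(LongVector vector, IntBlock positions) {
        long sum = 0;
        try (ReleasableIterator<? extends LongBlock> it = vector.lookup(positions, ByteSizeValue.ofKb(100))) {
            while (it.hasNext()) {
                try (LongBlock block = it.next()) { // each emitted block must be released as well
                    for (int p = 0; p < block.getPositionCount(); p++) {
                        if (block.isNull(p) == false) {
                            sum += block.getLong(block.getFirstValueIndex(p));
                        }
                    }
                }
            }
        }
        return sum;
    }
}

The constant-vector override above is what makes the common cases cheap: an all-in-range lookup against a constant vector collapses to a single constant block, and an all-out-of-range lookup to a single constant-null block, with no per-position copying.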
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 746ccc97a2819..628ee93ed757d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -13,6 +13,8 @@ $endif$ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -54,6 +56,9 @@ $endif$ @Override $Type$Vector filter(int... positions); + @Override + ReleasableIterator<? extends $Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize); + $if(int)$ /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index f011d6f2a4b48..8f4390e8782c5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -72,9 +72,8 @@ $endif$ } @Override - public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new $Type$Lookup(this, positions, targetBlockSize); + public ReleasableIterator<? extends $Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index fae0a86762b92..10c78be15bd86 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -295,7 +295,7 @@ private Status(LuceneOperator operator) { Status(StreamInput in) throws IOException { processedSlices = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { processedQueries = in.readCollectionAsSet(StreamInput::readString); processedShards = in.readCollectionAsSet(StreamInput::readString); } else { @@ -314,7 +314,7 @@ private Status(LuceneOperator operator) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(processedSlices); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeCollection(processedQueries, StreamOutput::writeString); out.writeCollection(processedShards, StreamOutput::writeString); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java new file mode 100644 index 0000000000000..bb8d3fd269a8a --- /dev/null +++
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; +import org.elasticsearch.compute.data.ElementType; + +import java.util.ArrayList; +import java.util.List; + +/** + * This class provides operator factories for time-series aggregations. + * A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. + * For example: {@code sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket} + * + * 1. Initial Stage: + * In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. + * The {@code values} aggregations are added to collect values of the grouping keys excluding the time-bucket, + * which are then used for final result grouping. + * {@code rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 2. Intermediate Stage: + * Equivalent to the final mode of a standard hash aggregation. + * This stage merges and reduces the result of the rate aggregations, + * but merges (without reducing) the results of non-rate aggregations. + * {@code rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 3. Final Stage: + * This extra stage performs outer aggregations over the rate results + * and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. 
+ * {@code sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket} + */ +public final class TimeSeriesAggregationOperatorFactories { + + public record Initial( + int tsHashChannel, + int timeBucketChannel, + List<BlockHash.GroupSpec> groupings, + List<AggregatorFunctionSupplier> rates, + List<AggregatorFunctionSupplier> nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List<GroupingAggregator.Factory> aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + return new HashAggregationOperator( + aggregators, + () -> new TimeSeriesBlockHash(tsHashChannel, timeBucketChannel, driverContext), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesInitialAggregationOperatorFactory"; + } + } + + public record Intermediate( + int tsHashChannel, + int timeBucketChannel, + List<BlockHash.GroupSpec> groupings, + List<AggregatorFunctionSupplier> rates, + List<AggregatorFunctionSupplier> nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List<GroupingAggregator.Factory> aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + List<BlockHash.GroupSpec> hashGroups = List.of( + new BlockHash.GroupSpec(tsHashChannel, ElementType.BYTES_REF), + new BlockHash.GroupSpec(timeBucketChannel, ElementType.LONG) + ); + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(hashGroups, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesIntermediateAggregationOperatorFactory"; + } + } + + public record Final( + List<BlockHash.GroupSpec> groupings, + List<AggregatorFunctionSupplier> outerRates, + List<AggregatorFunctionSupplier> nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List<GroupingAggregator.Factory> aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : outerRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(groupings, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesFinalAggregationOperatorFactory"; + } + } + + static List<GroupingAggregator.Factory> valuesAggregatorForGroupings(List<BlockHash.GroupSpec> groupings, int timeBucketChannel) { + List<GroupingAggregator.Factory> aggregators = new ArrayList<>(); + for (BlockHash.GroupSpec g : groupings) { + if (g.channel() != timeBucketChannel) { + final List<Integer> channels = List.of(g.channel()); + // TODO: perhaps introduce a specialized aggregator for this?
+ var aggregatorSupplier = (switch (g.elementType()) { + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case NULL, DOC, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); + }); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + } + return aggregators; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java deleted file mode 100644 index 0cf0854a9b0c7..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.aggregation.GroupingAggregator; -import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; -import org.elasticsearch.core.TimeValue; - -import java.util.List; - -public record TimeSeriesAggregationOperatorFactory( - AggregatorMode mode, - int tsHashChannel, - int timestampIntervalChannel, - TimeValue timeSeriesPeriod, - List<GroupingAggregator.Factory> aggregators, - int maxPageSize -) implements Operator.OperatorFactory { - - @Override - public String describe() { - return "TimeSeriesAggregationOperator[mode=" - + mode - + ", tsHashChannel = " - + tsHashChannel - + ", timestampIntervalChannel = " - + timestampIntervalChannel - + ", timeSeriesPeriod = " - + timeSeriesPeriod - + ", maxPageSize = " - + maxPageSize - + "]"; - } - - @Override - public Operator get(DriverContext driverContext) { - BlockHash blockHash = new TimeSeriesBlockHash(tsHashChannel, timestampIntervalChannel, driverContext); - return new HashAggregationOperator(aggregators, () -> blockHash, driverContext); - } - -}
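To see how these factories chain for the query in the class javadoc, sum(rate(write_requests)) BY cluster, time-bucket: the sketch below is hypothetical wiring, with channel numbers mirroring the (_doc, tsid, timestamp, bucket, requests, cluster) page layout used by TimeSeriesAggregationOperatorTests further down; maxPageSize and unitInMillis are assumed parameters.

import java.util.List;

import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier;
import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier;
import org.elasticsearch.compute.aggregation.blockhash.BlockHash;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.operator.TimeSeriesAggregationOperatorFactories;

final class TimeSeriesStagesSketch {
    static void wire(int maxPageSize, long unitInMillis) {
        // Stage 1: group by (tsid, bucket); rate runs in INITIAL mode, values(cluster) is collected alongside.
        var initial = new TimeSeriesAggregationOperatorFactories.Initial(
            1, // tsid channel
            3, // time-bucket channel
            List.of(new BlockHash.GroupSpec(5, ElementType.BYTES_REF)), // `cluster` grouping key
            List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)),
            List.of(), // no non-rate aggregations in this example
            maxPageSize
        );
        // Stage 2: still grouped by (tsid, bucket); rate is reduced to FINAL, non-rates would stay INTERMEDIATE.
        var intermediate = new TimeSeriesAggregationOperatorFactories.Intermediate(
            0, // tsid channel in the intermediate layout
            1, // time-bucket channel
            List.of(new BlockHash.GroupSpec(5, ElementType.BYTES_REF)),
            List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)),
            List.of(),
            maxPageSize
        );
        // Stage 3: regroup by the user-requested keys and apply the outer sum over the rate results.
        var outer = new TimeSeriesAggregationOperatorFactories.Final(
            List.of(new BlockHash.GroupSpec(3, ElementType.BYTES_REF), new BlockHash.GroupSpec(1, ElementType.LONG)),
            List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))),
            List.of(),
            maxPageSize
        );
    }
}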
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index f1698ea401d28..adce8d8a88407 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.compute.data.Page; @@ -17,6 +18,7 @@ import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.transport.TransportException; +import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -89,6 +91,20 @@ public int bufferSize() { } } + public void addCompletionListener(ActionListener<Void> listener) { + buffer.addCompletionListener(ActionListener.running(() -> { + try (RefCountingListener refs = new RefCountingListener(listener)) { + for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { + // Create an outstanding instance and then finish to complete the completionListener + // if we haven't registered any instances of exchange sinks or exchange sources before. + pending.trackNewInstance(); + pending.completion.addListener(refs.acquire()); + pending.finishInstance(); + } + } + })); + } + /** * Create a new {@link ExchangeSource} for exchanging data * @@ -253,10 +269,10 @@ public Releasable addEmptySink() { private static class PendingInstances { private final AtomicInteger instances = new AtomicInteger(); - private final Releasable onComplete; + private final SubscribableListener<Void> completion = new SubscribableListener<>(); - PendingInstances(Releasable onComplete) { - this.onComplete = onComplete; + PendingInstances(Runnable onComplete) { + completion.addListener(ActionListener.running(onComplete)); } void trackNewInstance() { @@ -268,7 +284,7 @@ void finishInstance() { int refs = instances.decrementAndGet(); assert refs >= 0; if (refs == 0) { - onComplete.close(); + completion.onResponse(null); } } }
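The new completion hook pairs naturally with a future: register it before attaching sinks and sources, then block once the work has been submitted. A compact sketch of the idiom the exchange tests below rely on (the method name is illustrative):

import java.util.concurrent.TimeUnit;

import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler;

final class ExchangeCompletionSketch {
    // Completes only after every outstanding sink and source has finished; the dummy
    // instance tracked inside addCompletionListener makes it fire even when none were registered.
    static void awaitDrained(ExchangeSourceHandler sourceHandler) {
        PlainActionFuture<Void> completion = new PlainActionFuture<>();
        sourceHandler.addCompletionListener(completion);
        // ... create exchange sources, add remote sinks, run drivers ...
        completion.actionGet(10, TimeUnit.SECONDS);
    }
}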
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 3d80e560cc4d2..017d4c7065bed 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -35,6 +35,7 @@ import java.util.BitSet; import java.util.List; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -283,8 +284,19 @@ public void testConstantIntBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantIntVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); assertThat(block.asVector().min(), equalTo(value)); assertThat(block.asVector().max(), equalTo(value)); @@ -365,8 +377,19 @@ public void testConstantLongBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantLongVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -447,8 +470,19 @@ public void testConstantDoubleBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantDoubleVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -605,8 +639,19 @@ public void testConstantBytesRefBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBytesRefVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -689,8 +734,19 @@ public void testConstantBooleanBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBooleanVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -716,6 +772,24 @@ public void testConstantNullBlock() { assertThat(positionCount, is(block.getPositionCount())); assertThat(block.getPositionCount(), is(positionCount)); assertThat(block.isNull(randomPosition(positionCount)), is(true)); + if (positionCount > 2) { + List<List<Object>> expected = new ArrayList<>(); + expected.add(null); + expected.add(null); + expected.add(null); + assertLookup( + block, + positions(blockFactory, 1, 2, new int[] { 1, 2 }), + expected, + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); + } + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); releaseAndAssertBreaker(block); } } @@ -1544,11 +1618,16 @@ static void assertEmptyLookup(BlockFactory blockFactory, Block block) { } static void assertLookup(Block block, IntBlock positions, List<List<Object>> expected) { + assertLookup(block, positions, expected, l -> {}); + } + + static void assertLookup(Block block, IntBlock positions, List<List<Object>> expected, Consumer<Block> extra) { try (positions; ReleasableIterator<? extends Block> lookup = block.lookup(positions, ByteSizeValue.ofKb(100))) { assertThat(lookup.hasNext(),
equalTo(true)); try (Block b = lookup.next()) { assertThat(valuesAtPositions(b, 0, b.getPositionCount()), equalTo(expected)); assertThat(b.blockFactory(), sameInstance(positions.blockFactory())); + extra.accept(b); } assertThat(lookup.hasNext(), equalTo(false)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index ae43e3954935d..86bfec5120945 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -42,9 +42,8 @@ public class BlockAccountingTests extends ComputeTestCase { public void testBooleanVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); @@ -62,9 +61,8 @@ public void testBooleanVector() { public void testIntVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newIntArrayVector(new int[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); @@ -82,9 +80,8 @@ public void testIntVector() { public void testLongVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newLongArrayVector(new long[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); @@ -103,9 +100,8 @@ public void testLongVector() { public void testDoubleVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newDoubleArrayVector(new double[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); @@ -127,9 +123,8 @@ public void testBytesRefVector() { var emptyArray = new 
BytesRefArray(0, blockFactory.bigArrays()); var arrayWithOne = new BytesRefArray(0, blockFactory.bigArrays()); Vector emptyVector = blockFactory.newBytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BytesRefVectorBlock.class - ); + long expectedEmptyVectorUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); @@ -146,9 +141,8 @@ public void testBytesRefVector() { public void testBooleanBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new BooleanArrayBlock( @@ -194,18 +188,16 @@ public void testBooleanBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new IntArrayBlock( @@ -242,18 +234,16 @@ public void testIntBlock() { public void testIntBlockWithNullFirstValues() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new LongArrayBlock(new long[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + 
long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new LongArrayBlock( @@ -299,18 +289,16 @@ public void testLongBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new DoubleArrayBlock( @@ -356,9 +344,8 @@ public void testDoubleBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index 79135b12b2a83..573c960e86b9c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -11,65 +11,49 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; -import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import 
org.elasticsearch.index.mapper.NumberFieldMapper; -import org.hamcrest.Matcher; import org.junit.After; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; -import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; -public class TimeSeriesAggregationOperatorTests extends AnyOperatorTestCase { +public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { - private IndexReader reader; - private final Directory directory = newDirectory(); + private IndexReader reader = null; + private Directory directory = null; @After public void cleanup() throws IOException { IOUtils.close(reader, directory); } - @Override - protected Operator.OperatorFactory simple() { - return new TimeSeriesAggregationOperatorFactory(AggregatorMode.FINAL, 0, 1, TimeValue.ZERO, List.of(), 100); + /** + * A {@link DriverContext} with a nonBreakingBigArrays. + */ + protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } - @Override - protected Matcher<String> expectedDescriptionOfSimple() { - return equalTo( - "TimeSeriesAggregationOperator[mode=FINAL, tsHashChannel = 0, timestampIntervalChannel = 1, " + "timeSeriesPeriod = 0s, maxPageSize = 100]" - ); - } - - @Override - protected Matcher<String> expectedToStringOfSimple() { - return equalTo( - "HashAggregationOperator[blockHash=TimeSeriesBlockHash{keys=[BytesRefKey[channel=0], " + "LongKey[channel=1]], entries=-1b}, aggregators=[]]" - ); - } - - public void testBasicRate() { + public void testBasicRate() throws Exception { long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; @@ -78,25 +62,51 @@ public void testBasicRate() { long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; - List<Pod> pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - long unit = between(1, 5); - Map<Group, Double> actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit), TimeValue.ZERO); - assertThat( - actualRates, - equalTo( - Map.of( - new Group("\u0001\u0003pods\u0002p1", 0), - 35.0 * unit / 111.0, - new Group("\u0001\u0003pods\u0002p2", 0), - 42.0 * unit / 13.0, - new Group("\u0001\u0003pods\u0002p3", 0), - 10.0 * unit / 20.0 - ) - ) + List<Pod> pods = List.of( + new Pod("p1", "cluster_1", new Interval(2100, t1, v1)), + new Pod("p2", "cluster_1", new Interval(600, t2, v2)), + new Pod("p3", "cluster_2", new Interval(1100, t3, v3)) ); + long unit = between(1, 5); + { + List<List<Object>> actual = runRateTest( + pods, + List.of("cluster"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List<List<Object>> expected = List.of( + List.of(new BytesRef("cluster_1"), 35.0 * unit / 111.0 + 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List<List<Object>> actual = runRateTest(pods, List.of("pod"),
TimeValue.timeValueMillis(unit), TimeValue.timeValueMillis(500)); + List<List<Object>> expected = List.of( + List.of(new BytesRef("p1"), 35.0 * unit / 111.0), + List.of(new BytesRef("p2"), 42.0 * unit / 13.0), + List.of(new BytesRef("p3"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List<List<Object>> actual = runRateTest( + pods, + List.of("cluster", "bucket"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List<List<Object>> expected = List.of( + List.of(new BytesRef("cluster_1"), 2000L, 35.0 * unit / 111.0), + List.of(new BytesRef("cluster_1"), 500L, 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 1000L, 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } } - public void testRateWithInterval() { + public void testRateWithInterval() throws Exception { long[] v1 = { 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3 }; long[] t1 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; @@ -105,59 +115,71 @@ public void testRateWithInterval() { long[] v3 = { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 }; long[] t3 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; - List<Pod> pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - Map<Group, Double> actualRates = runRateTest(pods, TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1)); - assertMap( - actualRates, - matchesMap().entry(new Group("\u0001\u0003pods\u0002p1", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p1", 60_000), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p1", 0), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p2", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 60_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 0), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 60_000), 0.07936D) - .entry(new Group("\u0001\u0003pods\u0002p3", 0), 0.00124D) + List<Pod> pods = List.of( + new Pod("p1", "cluster_1", new Interval(0, t1, v1)), + new Pod("p2", "cluster_2", new Interval(0, t2, v2)), + new Pod("p3", "cluster_2", new Interval(0, t3, v3)) + ); + List<List<Object>> actual = runRateTest( + pods, + List.of("pod", "bucket"), + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1) + ); + List<List<Object>> expected = List.of( + List.of(new BytesRef("p1"), 120_000L, 0.0D), + List.of(new BytesRef("p1"), 60_000L, 8.0E-5D), + List.of(new BytesRef("p1"), 0L, 8.0E-5D), + List.of(new BytesRef("p2"), 120_000L, 0.0D), + List.of(new BytesRef("p2"), 60_000L, 0.0D), + List.of(new BytesRef("p2"), 0L, 0.0D), + List.of(new BytesRef("p3"), 120_000L, 0.0D), + List.of(new BytesRef("p3"), 60_000L, 0.07936D), + List.of(new BytesRef("p3"), 0L, 0.00124D) ); + assertThat(actual, equalTo(expected)); } - public void testRandomRate() { + public void testRandomRate() throws Exception { int numPods = between(1, 10); List<Pod> pods = new ArrayList<>(); - Map<Group, Double> expectedRates = new HashMap<>(); TimeValue unit = TimeValue.timeValueSeconds(1); + List<List<Object>> expected = new ArrayList<>(); for (int p = 0; p < numPods; p++) { - int numValues = between(2, 100); - long[] values = new long[numValues]; - long[] times = new long[numValues]; - long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - for (int i = 0; i < numValues; i++) { - values[i] = randomIntBetween(0, 100); - t += TimeValue.timeValueSeconds(between(1, 10)).millis(); - times[i] = t; + int numIntervals = randomIntBetween(1, 3); + Interval[] intervals = new Interval[numIntervals]; +
long startTimeInHours = between(10, 100); + String podName = "p" + p; + for (int interval = 0; interval < numIntervals; interval++) { + final long startInterval = TimeValue.timeValueHours(--startTimeInHours).millis(); + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long delta = 0; + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + delta += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = delta; + } + intervals[interval] = new Interval(startInterval, times, values); + if (numValues == 1) { + expected.add(List.of(new BytesRef(podName), startInterval, null)); + } else { + expected.add(List.of(new BytesRef(podName), startInterval, intervals[interval].expectedRate(unit))); + } } - Pod pod = new Pod("p" + p, times, values); + Pod pod = new Pod(podName, "cluster", intervals); pods.add(pod); - if (numValues == 1) { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), null); - } else { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), pod.expectedRate(unit)); - } } - Map<Group, Double> actualRates = runRateTest(pods, unit, TimeValue.ZERO); - assertThat(actualRates, equalTo(expectedRates)); + List<List<Object>> actual = runRateTest(pods, List.of("pod", "bucket"), unit, TimeValue.timeValueHours(1)); + assertThat(actual, equalTo(expected)); } - record Pod(String name, long[] times, long[] values) { - Pod { - assert times.length == values.length : times.length + "!=" + values.length; - } - + record Interval(long offset, long[] times, long[] values) { double expectedRate(TimeValue unit) { double dv = 0; - for (int i = 0; i < values.length - 1; i++) { - if (values[i + 1] < values[i]) { - dv += values[i]; + for (int v = 0; v < values.length - 1; v++) { + if (values[v + 1] < values[v]) { + dv += values[v]; } } dv += (values[values.length - 1] - values[0]); @@ -166,9 +188,13 @@ record Pod(String name, long[] times, long[] values) { } } - Map<Group, Double> runRateTest(List<Pod> pods, TimeValue unit, TimeValue interval) { + record Pod(String name, String cluster, Interval...
intervals) {} + + List<List<Object>> runRateTest(List<Pod> pods, List<String> groupings, TimeValue unit, TimeValue bucketInterval) throws IOException { + cleanup(); + directory = newDirectory(); long unitInMillis = unit.millis(); - record Doc(String pod, long timestamp, long requests) { + record Doc(String pod, String cluster, long timestamp, long requests) { } var sourceOperatorFactory = createTimeSeriesSourceOperator( @@ -177,70 +203,114 @@ record Doc(String pod, long timestamp, long requests) { Integer.MAX_VALUE, between(1, 100), randomBoolean(), - interval, + bucketInterval, writer -> { List<Doc> docs = new ArrayList<>(); for (Pod pod : pods) { - for (int i = 0; i < pod.times.length; i++) { - docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + for (Interval interval : pod.intervals) { + for (int i = 0; i < interval.times.length; i++) { + docs.add(new Doc(pod.name, pod.cluster, interval.offset + interval.times[i], interval.values[i])); + } } } Randomness.shuffle(docs); for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + writeTS( + writer, + doc.timestamp, + new Object[] { "pod", doc.pod, "cluster", doc.cluster }, + new Object[] { "requests", doc.requests } + ); } return docs.size(); } ); var ctx = driverContext(); - var aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ); - Operator initialHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.INITIAL, + List<Operator> extractOperators = new ArrayList<>(); + var rateField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + Operator extractRate = (ValuesSourceReaderOperatorTests.factory(reader, rateField, ElementType.LONG).get(ctx)); + extractOperators.add(extractRate); + List<String> nonBucketGroupings = new ArrayList<>(groupings); + nonBucketGroupings.remove("bucket"); + for (String grouping : nonBucketGroupings) { + var groupingField = new KeywordFieldMapper.KeywordFieldType(grouping); + extractOperators.add(ValuesSourceReaderOperatorTests.factory(reader, groupingField, ElementType.BYTES_REF).get(ctx)); + } + // _doc, tsid, timestamp, bucket, requests, grouping1, grouping2 + Operator initialAgg = new TimeSeriesAggregationOperatorFactories.Initial( 1, 3, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); - aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL) - ); - Operator finalHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.FINAL, + // tsid, bucket, rate[0][0],rate[0][1],rate[0][2], grouping1, grouping2 + Operator intermediateAgg = new TimeSeriesAggregationOperatorFactories.Intermediate( 0, 1, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); + // tsid, bucket, rate, grouping1, grouping2 + List<BlockHash.GroupSpec> finalGroups = new ArrayList<>(); + int groupChannel = 3; + for (String grouping : groupings) { + if (grouping.equals("bucket")) { + finalGroups.add(new
BlockHash.GroupSpec(1, ElementType.LONG)); + } else { + finalGroups.add(new BlockHash.GroupSpec(groupChannel++, ElementType.BYTES_REF)); + } + } + Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final( + finalGroups, + List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))), + List.of(), + between(1, 100) + ).get(ctx); + List<Page> results = new ArrayList<>(); - var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( ctx, sourceOperatorFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), initialHash, finalHash), + CollectionUtils.concatLists(extractOperators, List.of(initialAgg, intermediateAgg, finalAgg)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - Map<Group, Double> rates = new HashMap<>(); + List<List<Object>> values = new ArrayList<>(); for (Page result : results) { - BytesRefBlock keysBlock = result.getBlock(0); - LongBlock timestampIntervalsBock = result.getBlock(1); - DoubleBlock ratesBlock = result.getBlock(2); - for (int i = 0; i < result.getPositionCount(); i++) { - var key = new Group(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), timestampIntervalsBock.getLong(i)); - rates.put(key, ratesBlock.getDouble(i)); + for (int p = 0; p < result.getPositionCount(); p++) { + int blockCount = result.getBlockCount(); + List<Object> row = new ArrayList<>(); + for (int b = 0; b < blockCount; b++) { + row.add(BlockUtils.toJavaObject(result.getBlock(b), p)); + } + values.add(row); } result.releaseBlocks(); } - return rates; + values.sort((v1, v2) -> { + for (int i = 0; i < v1.size(); i++) { + if (v1.get(i) instanceof BytesRef b1) { + int cmp = b1.compareTo((BytesRef) v2.get(i)); + if (cmp != 0) { + return cmp; + } + } else if (v1.get(i) instanceof Long b1) { + int cmp = b1.compareTo((Long) v2.get(i)); + if (cmp != 0) { + return -cmp; + } + } + } + return 0; + }); + return values; } - - record Group(String tsidHash, long timestampInterval) {} } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index bdaa045633dc0..51332b3c8997a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -55,6 +55,7 @@ import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -94,6 +95,8 @@ public void testBasic() throws Exception { ExchangeSink sink1 = sinkExchanger.createExchangeSink(); ExchangeSink sink2 = sinkExchanger.createExchangeSink(); ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture<Void> sourceCompletion = new PlainActionFuture<>(); + sourceExchanger.addCompletionListener(sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); SubscribableListener<Void> waitForReading = source.waitForReading(); @@ -133,7 +136,9 @@ sink2.finish(); assertTrue(sink2.isFinished()); assertTrue(source.isFinished()); +
assertFalse(sourceCompletion.isDone()); source.finish(); + sourceCompletion.actionGet(10, TimeUnit.SECONDS); ESTestCase.terminate(threadPool); for (Page page : pages) { page.releaseBlocks(); } @@ -320,7 +325,9 @@ protected void start(Driver driver, ActionListener<Void> listener) { public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); + PlainActionFuture<Void> sourceCompletionFuture = new PlainActionFuture<>(); var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + sourceExchanger.addCompletionListener(sourceCompletionFuture); List<ExchangeSinkHandler> sinkHandlers = new ArrayList<>(); Supplier<ExchangeSink> exchangeSink = () -> { final ExchangeSinkHandler sinkHandler; @@ -336,6 +343,7 @@ public void testConcurrentWithHandlers() { final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } public void testEarlyTerminate() { @@ -358,7 +366,7 @@ public void testEarlyTerminate() { assertTrue(sink.isFinished()); } - public void testConcurrentWithTransportActions() throws Exception { + public void testConcurrentWithTransportActions() { MockTransportService node0 = newTransportService(); ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); @@ -371,12 +379,15 @@ public void testConcurrentWithTransportActions() { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture<Void> sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ?
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -427,6 +438,8 @@ public void sendResponse(TransportResponse transportResponse) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture<Void> sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); @@ -438,6 +451,7 @@ public void sendResponse(TransportResponse transportResponse) { assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); sinkHandler.onFailure(new RuntimeException(cause)); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index ca084ab26908d..8f13dd53a0d21 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -67,7 +67,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); private static TestFeatureService remoteFeaturesService; - private static RestClient remoteFeaturesServiceClient; + private static RestClient remoteClusterClient; @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List<Object[]> readScriptSpec() throws Exception { @@ -95,30 +95,34 @@ public MultiClusterSpecIT(String fileName, String groupName, String testName, In @Override protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); - for (String feature : testCase.requiredFeatures) { - assumeTrue("Test " + testName + " requires " + feature, remoteFeaturesService().clusterHasFeature(feature)); - } + checkCapabilities(remoteClusterClient(), remoteFeaturesService(), testName, testCase); assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); } private TestFeatureService remoteFeaturesService() throws IOException { if (remoteFeaturesService == null) { - HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); - remoteFeaturesServiceClient = super.buildClient(restAdminSettings(), remoteHosts); - var remoteNodeVersions = readVersionsFromNodesInfo(remoteFeaturesServiceClient); + var remoteNodeVersions = readVersionsFromNodesInfo(remoteClusterClient()); var semanticNodeVersions = remoteNodeVersions.stream() .map(ESRestTestCase::parseLegacyVersion) .flatMap(Optional::stream) .collect(Collectors.toSet()); - remoteFeaturesService =
createTestFeatureService(getClusterStateFeatures(remoteFeaturesServiceClient), semanticNodeVersions); + remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteClusterClient()), semanticNodeVersions); } return remoteFeaturesService; } + private RestClient remoteClusterClient() throws IOException { + if (remoteClusterClient == null) { + HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); + remoteClusterClient = super.buildClient(restAdminSettings(), remoteHosts); + } + return remoteClusterClient; + } + @AfterClass public static void closeRemoveFeaturesService() throws IOException { - IOUtils.close(remoteFeaturesServiceClient); + IOUtils.close(remoteClusterClient); } @Override diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 072dc5265fe60..7c57212d0f574 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -118,15 +118,16 @@ public void testDoNotLogWithInfo() throws IOException { Map<String, String> colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer"); assertEquals(List.of(colA), result.get("columns")); assertEquals(List.of(List.of(1)), result.get("values")); - try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { - Streams.readAllLines(log, line -> { assertThat(line, not(containsString("DO_NOT_LOG_ME"))); }); + for (int i = 0; i < cluster.getNumNodes(); i++) { + try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { + Streams.readAllLines(log, line -> assertThat(line, not(containsString("DO_NOT_LOG_ME")))); + } } } finally { setLoggingLevel(null); } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108367") public void testDoLogWithDebug() throws IOException { try { setLoggingLevel("DEBUG"); @@ -136,15 +137,17 @@ public void testDoLogWithDebug() throws IOException { Map<String, String> colA = Map.of("name", "DO_LOG_ME", "type", "integer"); assertEquals(List.of(colA), result.get("columns")); assertEquals(List.of(List.of(1)), result.get("values")); - try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { - boolean[] found = new boolean[] { false }; - Streams.readAllLines(log, line -> { - if (line.contains("DO_LOG_ME")) { - found[0] = true; - } - }); - assertThat(found[0], equalTo(true)); + boolean[] found = new boolean[] { false }; + for (int i = 0; i < cluster.getNumNodes(); i++) { + try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { + Streams.readAllLines(log, line -> { + if (line.contains("DO_LOG_ME")) { + found[0] = true; + } + }); + } } + assertThat(found[0], equalTo(true)); } finally { setLoggingLevel(null); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 448d39913a8f6..0b653a1d92106 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -7,12 +7,15 @@ package org.elasticsearch.xpack.esql.qa.rest; import
com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.http.HttpEntity; +import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; @@ -21,6 +24,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.CsvTestUtils; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder; @@ -56,6 +60,8 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; +// This test can run very long in serverless configurations +@TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public abstract class EsqlSpecTestCase extends ESRestTestCase { // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here @@ -146,12 +152,41 @@ public final void test() throws Throwable { } protected void shouldSkipTest(String testName) throws IOException { - for (String feature : testCase.requiredFeatures) { - assumeTrue("Test " + testName + " requires " + feature, clusterHasFeature(feature)); - } + checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT)); } + protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) + throws IOException { + if (testCase.requiredCapabilities.isEmpty()) { + return; + } + try { + if (clusterHasCapability(client, "POST", "/_query", List.of(), testCase.requiredCapabilities).orElse(false)) { + return; + } + LOGGER.info("capabilities API returned false, we might be in a mixed version cluster so falling back to cluster features"); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() / 100 == 4) { + /* + * The node we're testing against is too old for the capabilities + * API which means it has to be pretty old. Very old capabilities + * are ALSO present in the features API, so we can check them instead. + * + * It's kind of weird that we check for *any* 400, but that's required + * because old versions of Elasticsearch return 400, not the expected + * 404. + */ + LOGGER.info("capabilities API failed, falling back to cluster features"); + } else { + throw e; + } + } + for (String feature : testCase.requiredCapabilities) { + assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature("esql." 
+ feature)); + } + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec similarity index 88% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 6ddc9601db4ac..3f6ef72d84bc3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,4 +1,7 @@ +# Examples that were published in a blog post + 2023-08-08.full-blown-query +required_capability: enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 809f4e9ba2c74..c0572e7bbcd49 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,7 @@ avg(salary):double | always_false:boolean in -required_feature: esql.mv_warn +required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true @@ -236,7 +236,7 @@ emp_no:integer |languages:integer |byte2bool:boolean |short2bool:boolean ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = [true, false, true, false] | eval sa = mv_sort(a), sb = mv_sort(a, "DESC"); @@ -245,7 +245,7 @@ a:boolean | sa:boolean | sb:boolean ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(is_rehired, "DESC"), sa = mv_sort(is_rehired) @@ -263,7 +263,7 @@ emp_no:integer | is_rehired:boolean | sa:boolean | sd:boolea ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort row a = [true, false, false, true] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); @@ -273,7 +273,7 @@ a:boolean | a1:boolean | a2:boolean ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(is_rehired, 0) @@ -290,7 +290,7 @@ emp_no:integer | is_rehired:boolean | a1:boolean ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -302,7 +302,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -323,7 +323,7 @@ still_hired:boolean | first_letter:keyword ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -347,7 +347,7 @@ still_hired:boolean | job_positions:keyword ; implicitCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired == "true" | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -355,7 +355,7 @@ emp_no:integer ; implicitCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired != "true" | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ 
-363,7 +363,7 @@ emp_no:integer ; implicitCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired in ("true", "false") | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -371,7 +371,7 @@ emp_no:integer ; implicitCastingInField -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where false in ("true", still_hired) | sort emp_no | keep emp_no | limit 1; emp_no:integer diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index aa6529c2d4319..508cccc20b86c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -6,7 +6,7 @@ # Test against a polygon similar in size to the Bottom Left polygon whereIntersectsSinglePolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -25,7 +25,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsSinglePolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.001 0.001, 0.999 0.001, 0.999 0.999, 0.001 0.999, 0.001 0.001))")) @@ -38,7 +38,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinSinglePolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -53,7 +53,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointSinglePolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -79,7 +79,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon smaller in size to the Bottom Left polygon whereIntersectsSmallerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -98,7 +98,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsSmallerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -111,7 +111,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinSmallerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -123,7 +123,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointSmallerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -149,7 +149,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon similar 
in size to the entire test data whereIntersectsLargerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -180,7 +180,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -191,7 +191,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -222,7 +222,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointLargerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -236,7 +236,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon larger than all test data whereIntersectsEvenLargerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -267,7 +267,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsEvenLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -278,7 +278,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinEvenLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -309,7 +309,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointEvenLargerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index 64a8c1d9da316..d4b45ca37fc2d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -130,7 +130,7 @@ error_rate:double | hour:date nullOnMultivaluesMathOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. @@ -142,7 +142,7 @@ a:integer | b:integer | sum:integer notNullOnMultivaluesMathOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NOT NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. 
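An aside on the runner side of these spec changes: the checkCapabilities helper added to EsqlSpecTestCase earlier in this diff is what consumes the required_capability headers. Its control flow is easier to follow untangled from the hunk markup, so here is a readability sketch of the same logic, with names exactly as in the patch (the two LOGGER.info calls are elided); treat it as an outline, not the authoritative patch text:

protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase)
    throws IOException {
    if (testCase.requiredCapabilities.isEmpty()) {
        return; // nothing to gate this test on
    }
    try {
        // Preferred path: ask the capabilities API whether POST /_query supports everything we need.
        if (clusterHasCapability(client, "POST", "/_query", List.of(), testCase.requiredCapabilities).orElse(false)) {
            return;
        }
        // A false answer may just mean a mixed-version cluster, so fall through to cluster features.
    } catch (ResponseException e) {
        if (e.getResponse().getStatusLine().getStatusCode() / 100 != 4) {
            throw e; // anything but 4xx is a real failure, not "node predates the capabilities API"
        }
        // 4xx: the node is too old for the capabilities API, so fall back to cluster features.
    }
    // Fallback: very old capabilities are also published as cluster features under the "esql." prefix.
    for (String feature : testCase.requiredCapabilities) {
        assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature("esql." + feature));
    }
}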
@@ -153,7 +153,7 @@ a:integer | b:integer | sum:integer nullOnMultivaluesComparisonOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. @@ -166,7 +166,7 @@ a:integer | b:integer | same:boolean notNullOnMultivaluesComparisonOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. @@ -177,7 +177,7 @@ a:integer | b:integer | same:boolean notNullOnMultivaluesComparisonOperationWithPartialMatch -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec index 43e683e165e29..94dfd9f3267f7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec @@ -1,7 +1,7 @@ // Conversion-specific tests convertToBoolean -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero=0::boolean, one=1::bool ; @@ -10,7 +10,7 @@ false |true ; convertToInteger -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::integer, one="1"::int ; @@ -19,7 +19,7 @@ ROW zero="0"::integer, one="1"::int ; convertToIP -required_feature: esql.casting_operator +required_capability: casting_operator ROW ip="1.1.1.1"::ip ; @@ -28,7 +28,7 @@ ROW ip="1.1.1.1"::ip ; convertToLong -required_feature: esql.casting_operator +required_capability: casting_operator ROW long="-1"::long ; @@ -37,7 +37,7 @@ long:long ; convertToLongWithWarning -required_feature: esql.casting_operator +required_capability: casting_operator ROW long="1.1.1.1"::long ; warning:Line 1:10: evaluation of [\"1.1.1.1\"::long] failed, treating result as null. Only first 20 failures recorded. 
@@ -48,7 +48,7 @@ null ; convertToDouble -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::double ; @@ -57,7 +57,7 @@ ROW zero="0"::double ; convertToString -required_feature: esql.casting_operator +required_capability: casting_operator ROW one=1::keyword, two=2::text, three=3::string ; @@ -66,7 +66,7 @@ ROW one=1::keyword, two=2::text, three=3::string ; convertToDatetime -required_feature: esql.casting_operator +required_capability: casting_operator ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime ; @@ -75,7 +75,7 @@ ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime ; convertToVersion -required_feature: esql.casting_operator +required_capability: casting_operator ROW ver="1.2.3"::version ; @@ -84,7 +84,7 @@ ROW ver="1.2.3"::version ; convertToUnsignedLong -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng ; @@ -93,7 +93,7 @@ ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng ; convertToGeoPoint -required_feature: esql.casting_operator +required_capability: casting_operator ROW gp="POINT(0 0)"::geo_point ; @@ -102,7 +102,7 @@ POINT (0.0 0.0) ; convertToGeoShape -required_feature: esql.casting_operator +required_capability: casting_operator ROW gs="POINT(0 0)"::geo_shape ; @@ -111,7 +111,7 @@ POINT (0.0 0.0) ; convertToCartesianPoint -required_feature: esql.casting_operator +required_capability: casting_operator ROW cp="POINT(0 0)"::cartesian_point ; @@ -120,7 +120,7 @@ POINT (0.0 0.0) ; convertToCartesianShape -required_feature: esql.casting_operator +required_capability: casting_operator ROW cs="POINT(0 0)"::cartesian_shape ; @@ -129,7 +129,7 @@ POINT (0.0 0.0) ; convertChained -required_feature: esql.casting_operator +required_capability: casting_operator ROW one=1::STRING::LONG::BOOL ; @@ -138,7 +138,7 @@ true ; convertWithIndexMultipleConversionsInSameExpressionAndConversionInFiltering -required_feature: esql.casting_operator +required_capability: casting_operator FROM employees | EVAL en_str=emp_no::STRING, bd=ABS(birth_date::LONG)::STRING | KEEP en_str, emp_no, bd, birth_date @@ -153,7 +153,7 @@ required_feature: esql.casting_operator ; convertWithBoolExpressionAndQualifiedName -required_feature: esql.casting_operator +required_capability: casting_operator FROM employees | EVAL neg = (NOT still_hired)::string, sf = ROUND(height.scaled_float::double, 2) | KEEP emp_no, still_hired, neg, sf @@ -169,7 +169,7 @@ required_feature: esql.casting_operator ; docsCastOperator -required_feature: esql.casting_operator +required_capability: casting_operator //tag::docsCastOperator[] ROW ver = CONCAT(("0"::INT + 1)::STRING, ".2.3")::VERSION //end::docsCastOperator[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8d54288de552d..22e9231939d02 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,7 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warning:Line 1:58: evaluation of [to_datetime(ul)] failed, treating result as null. Only first 20 failures recorded. 
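Stepping back from individual hunks: every spec change of this form, above and below, is the same mechanical rename. A test that used to declare required_feature: esql.<name> now declares required_capability: <name>, with the esql. prefix dropped; the fallback path sketched earlier re-adds that prefix only when it has to consult the legacy cluster-features API. Under the new convention a gated test looks like the following invented example, which reuses the mv_sort capability seen throughout this diff:

mvSortIllustration
required_capability: mv_sort

row a = [3, 1, 2] | eval sa = mv_sort(a);

a:integer | sa:integer
[3, 1, 2] | [1, 2, 3]
;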
@@ -357,7 +357,7 @@ date1:date | date2:date | dd_ms:integer ; evalDateDiffString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW date1 = TO_DATETIME("2023-12-02T11:00:00.000Z") | EVAL dd_ms = DATE_DIFF("microseconds", date1, "2023-12-02T11:00:00.001Z") @@ -623,7 +623,7 @@ dt:datetime |plus_post:datetime |plus_pre:datetime datePlusQuarter # "quarter" introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T01:01:01.000Z") | eval plusQuarter = dt + 2 quarters ; @@ -634,7 +634,7 @@ dt:datetime | plusQuarter:datetime datePlusAbbreviatedDurations # abbreviations introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T00:00:00.000Z") | eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms ; @@ -645,7 +645,7 @@ dt:datetime | plusDurations:datetime datePlusAbbreviatedPeriods # abbreviations introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T00:00:00.000Z") | eval plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d ; @@ -855,7 +855,7 @@ date:date | year:long ; dateExtractString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW date = DATE_PARSE("yyyy-MM-dd", "2022-05-06") | EVAL year = DATE_EXTRACT("year", "2022-05-06") @@ -896,7 +896,7 @@ Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-06-02 ; evalDateFormatString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | EVAL df = DATE_FORMAT("YYYY-MM-dd", "1989-06-02T00:00:00.000Z") @@ -925,7 +925,7 @@ Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-01-01T00:00:00.000 ; evalDateTruncString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | EVAL year_hired = DATE_TRUNC(1 year, "1991-06-26T00:00:00.000Z") @@ -990,7 +990,7 @@ FROM sample_data ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = ["1985-01-01T00:00:00.000Z", "1986-01-01T00:00:00.000Z", "1987-01-01T00:00:00.000Z"] | eval datetime = TO_DATETIME(a) @@ -1019,7 +1019,7 @@ count:long | age:long ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10003 @@ -1031,7 +1031,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1052,7 +1052,7 @@ required_feature: esql.agg_values ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1077,7 +1077,7 @@ required_feature: esql.agg_values ; implicitCastingNotEqual -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date != "1957-05-23T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime @@ -1087,7 +1087,7 @@ emp_no:integer | birth_date:datetime ; implicitCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date <= "1957-05-20T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime @@ -1097,7 +1097,7 @@ emp_no:integer | 
birth_date:datetime ; implicitCastingGreaterThan -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date > "1957-05-24T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec deleted file mode 100644 index f4bf2333cae86..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec +++ /dev/null @@ -1,67 +0,0 @@ -// This file contains any ESQL snippets from the docs that don't have a home -// anywhere else. The Isle of Misfit Toys. When you need to add new examples -// for the docs you should try to convert an existing test first. Just add -// the comments in whatever file the test already lives in. If you have to -// write a new test to make an example in the docs then put it in whatever -// file matches it's "theme" best. Put it next to similar tests. Not here. - -// Also! When Nik originally extracted examples from the docs to make them -// testable he didn't spend a lot of time putting the docs into appropriate -// files. He just made this one. He didn't put his toys away. We'd be better -// off not adding to this strange toy-pile and instead moving things into -// the appropriate files. - -enrich -// tag::enrich[] -ROW language_code = "1" -| ENRICH languages_policy -// end::enrich[] -; - -// tag::enrich-result[] -language_code:keyword | language_name:keyword -1 | English -// end::enrich-result[] -; - - -enrichOn -// tag::enrich_on[] -ROW a = "1" -| ENRICH languages_policy ON a -// end::enrich_on[] -; - -// tag::enrich_on-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_on-result[] -; - - -enrichWith -// tag::enrich_with[] -ROW a = "1" -| ENRICH languages_policy ON a WITH language_name -// end::enrich_with[] -; - -// tag::enrich_with-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_with-result[] -; - - -enrichRename -// tag::enrich_rename[] -ROW a = "1" -| ENRICH languages_policy ON a WITH name = language_name -// end::enrich_rename[] -; - -// tag::enrich_rename-result[] -a:keyword | name:keyword -1 | English -// end::enrich_rename-result[] -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec deleted file mode 100644 index 367fbf044deed..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ /dev/null @@ -1,350 +0,0 @@ -simple -row language_code = "1" -| enrich languages_policy -; - -language_code:keyword | language_name:keyword -1 | English -; - - -enrichOn -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; - -emp_no:integer | language_name:keyword -10001 | French -; - - -enrichOn2 -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; - -emp_no:integer | language_name:keyword -10001 | French -; - -simpleSortLimit -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; - -emp_no:integer | language_name:keyword -10001 | French -; - - -with -from employees | eval x = to_string(languages) 
| keep emp_no, x | sort emp_no | limit 1 -| enrich languages_policy on x with language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -withAlias -from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasSort -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasOverwriteName#[skip:-8.13.0] -from employees | sort emp_no -| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name -| keep emp_no | limit 1 -; - -emp_no:keyword -French -; - - -withAliasAndPlain -from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, language_name; - -emp_no:integer | x:keyword | lang:keyword | language_name:keyword -10100 | 4 | German | German -10099 | 2 | French | French -10098 | 4 | German | German -; - - -withTwoAliasesSameProp -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, lang2 = language_name; - -emp_no:integer | x:keyword | lang:keyword | lang2:keyword -10001 | 2 | French | French -; - - -redundantWith -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -nullInput -from employees | where emp_no == 10017 | keep emp_no, gender -| enrich languages_policy on gender with language_name, language_name; - -emp_no:integer | gender:keyword | language_name:keyword -10017 | null | null -; - - -constantNullInput -from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10020 | null | null -; - - -multipleEnrich -row a = "1", b = "2", c = "10" -| enrich languages_policy on a with a_lang = language_name -| enrich languages_policy on b with b_lang = language_name -| enrich languages_policy on c with c_lang = language_name; - -a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword -1 | 2 | 10 | English | French | null -; - - -enrichEval -from employees | eval x = to_string(languages) -| enrich languages_policy on x with lang = language_name -| eval language = concat(x, "-", lang) -| keep emp_no, x, lang, language -| sort emp_no desc | limit 3; - -emp_no:integer | x:keyword | lang:keyword | language:keyword -10100 | 4 | German | 4-German -10099 | 2 | French | 2-French -10098 | 4 | German | 4-German -; - - -multivalue -required_feature: esql.mv_sort -row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); - -a:keyword | a_lang:keyword -["1", "2"] | ["English", "French"] -; - - -enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env -| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) -| KEEP client_ip, count_env, max_env -| SORT client_ip -; - 
-client_ip:ip | count_env:i | max_env:keyword -172.21.0.5 | 1 | Development -172.21.2.113 | 2 | QA -172.21.2.162 | 2 | QA -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -; - - -enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr -| KEEP client_ip, env, client_cidr -| SORT client_ip -; - -client_ip:ip | env:keyword | client_cidr:ip_range -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.0.5 | Development | 172.21.0.0/16 -172.21.2.113 | [Development, QA] | 172.21.2.0/24 -172.21.2.162 | [Development, QA] | 172.21.2.0/24 -; - - -enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date > "1960-01-01" -| EVAL birth_year = DATE_EXTRACT("year", birth_date) -| EVAL age = 2022 - birth_year -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group, birth_year -| KEEP birth_year, age_group, count -| SORT birth_year DESC -; - -birth_year:long | age_group:keyword | count:long -1965 | Middle-aged | 1 -1964 | Middle-aged | 4 -1963 | Middle-aged | 7 -1962 | Senior | 6 -1961 | Senior | 8 -1960 | Senior | 8 -; - - -enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date IS NOT NULL -| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group -| SORT count DESC -; - -count:long | age_group:keyword -78 | Senior -12 | Middle-aged -; - - -enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH heights_policy ON height WITH height_group = description -| STATS count=count(height_group), min=min(height), max=max(height) BY height_group -| KEEP height_group, min, max, count -| SORT min ASC -; - -height_group:k | min:double | max:double | count:long -Very Short | 1.41 | 1.48 | 9 -Short | 1.5 | 1.59 | 20 -Medium Height | 1.61 | 1.79 | 26 -Tall | 1.8 | 1.99 | 25 -Very Tall | 2.0 | 2.1 | 20 -; - - -enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description -| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description -| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description -| KEEP birth_decade, hire_decade, birth_description, hire_description, count -| SORT birth_decade DESC, hire_decade DESC -; - -birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long -null | 1990 | null | Nineties Nostalgia | 6 -null | 1980 | null | Radical Eighties | 4 -1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 -1960 | 1980 | Swinging Sixties | Radical Eighties | 21 -1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 -1950 | 1980 | Nifty Fifties | Radical Eighties | 34 -; - - -spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_names ON city WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, 
airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.mv_warn - -FROM airports -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) -; -warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. -warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - -city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer -POINT(1.396561 24.127649) | 872 | 88 | 1044 -; - - -spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*) BY airport_in_city -| SORT count ASC -; - -count:long | airport_in_city:boolean -114 | null -396 | true -455 | false -; - - -spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city -| SORT count ASC -; - -count:long | centroid:geo_point | airport_in_city:boolean -114 | POINT (-24.750062 31.575549) | null -396 | POINT (-2.534797 20.667712) | true -455 | POINT (3.090752 27.676442) | false -; - - -spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "IDR" -| ENRICH city_airports ON name WITH city_name = city, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length -; - -abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i -IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 -; diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index f5847260bbb16..bd384886f0dd7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -1,10 +1,10 @@ -simple +simpleNoLoad from employees | eval x = 1, y = to_string(languages) | enrich languages_policy on y | where x > 1 | keep emp_no, language_name | limit 1; emp_no:integer | language_name:keyword ; -docsGettingStartedEnrich +docsGettingStartedEnrichNoLoad // tag::gs-enrich[] FROM sample_data | KEEP @timestamp, client_ip, event_duration @@ -30,3 +30,458 @@ FROM sample_data median_duration:double | env:keyword ; + +simple +required_capability: enrich_load + +// tag::enrich[] +ROW language_code = "1" +| ENRICH languages_policy +// end::enrich[] +; + +// tag::enrich-result[] +language_code:keyword | language_name:keyword +1 | English +// end::enrich-result[] +; + +enrichOnSimple +required_capability: enrich_load + +// tag::enrich_on[] +ROW a = "1" +| ENRICH languages_policy ON a +// end::enrich_on[] +; + +// tag::enrich_on-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_on-result[] +; + + +enrichOn +required_capability: enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; + +emp_no:integer | language_name:keyword +10001 | French +; + + +enrichOn2 +required_capability: enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; + +emp_no:integer | language_name:keyword +10001 | French +; + + +simpleSortLimit +required_capability: enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; + +emp_no:integer | language_name:keyword +10001 | French +; + +with +required_capability: enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 +| enrich languages_policy on x with language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +withSimple +required_capability: enrich_load + +// tag::enrich_with[] +ROW a = "1" +| ENRICH languages_policy ON a WITH language_name +// end::enrich_with[] +; + +// tag::enrich_with-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_with-result[] +; + + +withAlias +required_capability: enrich_load + +from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + +withAliasSimple +required_capability: enrich_load + +// tag::enrich_rename[] +ROW a = "1" +| ENRICH languages_policy ON a WITH name = language_name +// end::enrich_rename[] +; + +// tag::enrich_rename-result[] +a:keyword | name:keyword +1 | English +// end::enrich_rename-result[] +; + + +withAliasSort +required_capability: enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasOverwriteName#[skip:-8.13.0] +required_capability: enrich_load + +from 
employees | sort emp_no +| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name +| keep emp_no | limit 1 +; + +emp_no:keyword +French +; + +withAliasAndPlain +required_capability: enrich_load + +from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, language_name; + +emp_no:integer | x:keyword | lang:keyword | language_name:keyword +10100 | 4 | German | German +10099 | 2 | French | French +10098 | 4 | German | German +; + + +withTwoAliasesSameProp +required_capability: enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, lang2 = language_name; + +emp_no:integer | x:keyword | lang:keyword | lang2:keyword +10001 | 2 | French | French +; + + +redundantWith +required_capability: enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +nullInput +required_capability: enrich_load + +from employees | where emp_no == 10017 | keep emp_no, gender +| enrich languages_policy on gender with language_name, language_name; + +emp_no:integer | gender:keyword | language_name:keyword +10017 | null | null +; + + +constantNullInput +required_capability: enrich_load + +from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10020 | null | null +; + + +multipleEnrich +required_capability: enrich_load + +row a = "1", b = "2", c = "10" +| enrich languages_policy on a with a_lang = language_name +| enrich languages_policy on b with b_lang = language_name +| enrich languages_policy on c with c_lang = language_name; + +a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword +1 | 2 | 10 | English | French | null +; + + +enrichEval +required_capability: enrich_load + +from employees | eval x = to_string(languages) +| enrich languages_policy on x with lang = language_name +| eval language = concat(x, "-", lang) +| keep emp_no, x, lang, language +| sort emp_no desc | limit 3; + +emp_no:integer | x:keyword | lang:keyword | language:keyword +10100 | 4 | German | 4-German +10099 | 2 | French | 2-French +10098 | 4 | German | 4-German +; + + +multivalue +required_capability: enrich_load +required_capability: mv_sort + +row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); + +a:keyword | a_lang:keyword +["1", "2"] | ["English", "French"] +; + + +enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] +required_capability: enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip +; + +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA +172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +; + + +enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] +required_capability: enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr +| 
KEEP client_ip, env, client_cidr +| SORT client_ip +; + +client_ip:ip | env:keyword | client_cidr:ip_range +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.0.5 | Development | 172.21.0.0/16 +172.21.2.113 | [Development, QA] | 172.21.2.0/24 +172.21.2.162 | [Development, QA] | 172.21.2.0/24 +; + + +enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM employees +| WHERE birth_date > "1960-01-01" +| EVAL birth_year = DATE_EXTRACT("year", birth_date) +| EVAL age = 2022 - birth_year +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group, birth_year +| KEEP birth_year, age_group, count +| SORT birth_year DESC +; + +birth_year:long | age_group:keyword | count:long +1965 | Middle-aged | 1 +1964 | Middle-aged | 4 +1963 | Middle-aged | 7 +1962 | Senior | 6 +1961 | Senior | 8 +1960 | Senior | 8 +; + + +enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM employees +| WHERE birth_date IS NOT NULL +| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group +| SORT count DESC +; + +count:long | age_group:keyword +78 | Senior +12 | Middle-aged +; + + +enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM employees +| ENRICH heights_policy ON height WITH height_group = description +| STATS count=count(height_group), min=min(height), max=max(height) BY height_group +| KEEP height_group, min, max, count +| SORT min ASC +; + +height_group:k | min:double | max:double | count:long +Very Short | 1.41 | 1.48 | 9 +Short | 1.5 | 1.59 | 20 +Medium Height | 1.61 | 1.79 | 26 +Tall | 1.8 | 1.99 | 25 +Very Tall | 2.0 | 2.1 | 20 +; + + +enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM employees +| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description +| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description +| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description +| KEEP birth_decade, hire_decade, birth_description, hire_description, count +| SORT birth_decade DESC, hire_decade DESC +; + +birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long +null | 1990 | null | Nineties Nostalgia | 6 +null | 1980 | null | Radical Eighties | 4 +1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 +1960 | 1980 | Swinging Sixties | Radical Eighties | 21 +1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 +1950 | 1980 | Nifty Fifties | Radical Eighties | 34 +; + + +spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_names ON city WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | 
Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load +required_capability: mv_warn + +FROM airports +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +; +warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value + +city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer +POINT(1.396561 24.127649) | 872 | 88 | 1044 +; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_capability: enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_capability: enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_capability: enrich_load + +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 85b665d717449..571d7835451c3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -201,7 +201,7 @@ Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. |Kyoichi ; roundArrays -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 8af770c521243..1f2bcb6b51209 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -92,7 +92,7 @@ int:integer |dbl:double ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded. @@ -108,7 +108,7 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded. @@ -124,7 +124,7 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -136,7 +136,7 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. @@ -149,7 +149,7 @@ emp_no:integer |salary_change:double ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -162,7 +162,7 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.] 
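Besides capability headers, several of the enrich and spatial tests above also carry a version-skip annotation on the test name, for example enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0]. Judging from the reason strings, skip:-8.13.99 excludes the test on every version up to and including 8.13.99, so it only runs where the feature under test exists. The two mechanisms are complementary: the annotation is keyed to release versions, while required_capability is keyed to what the node actually advertises.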
@@ -178,7 +178,7 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded.] @@ -194,7 +194,7 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change == 1.19)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded.] @@ -241,7 +241,7 @@ row a = [1.1, 2.1, 2.1] | eval da = mv_dedupe(a); ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change, 0, 1) @@ -436,7 +436,7 @@ ROW deg = [90.0, 180.0, 270.0] ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = [4.0, 2.0, -3.0, 2.0] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); @@ -445,7 +445,7 @@ a:double | sa:double | sd:double ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change, "DESC"), sa = mv_sort(salary_change) @@ -467,7 +467,7 @@ emp_no:integer | salary_change:double | sa:double | sd:double ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -479,7 +479,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -500,7 +500,7 @@ required_feature: esql.agg_values ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index c2c0b82f1a664..00a8c0da8f14c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -130,7 +130,7 @@ c:l | name:k ; convertFromDatetimeWithOptions -required_feature: esql.from_options +required_capability: from_options // tag::convertFromDatetimeWithOptions[] FROM employees OPTIONS "allow_no_indices"="false","preference"="_local" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 69ae951e4290d..e247d6c3a04ef 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,7 @@ // Integral types-specific tests inLongAndInt -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warning:Line 1:24: evaluation of [avg_worked_seconds in (372957040, salary_change.long, 236703986)] failed, treating result as null. Only first 20 failures recorded. 
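One convention worth calling out in these mv_warn hunks: the warning: and warningRegex: lines are assertions checked against the warnings a query returns, not comments. warning: expects the text verbatim, while warningRegex: is matched as a pattern, which is why the conversion tests in the hunks that follow escape their brackets, as in warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded.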
@@ -68,7 +68,7 @@ long:long |ul:ul ; convertDoubleToUL -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. @@ -127,7 +127,7 @@ int:integer |long:long ; convertULToLong -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +170,7 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -190,7 +190,7 @@ int:integer |ii:integer ; convertLongToInt -required_feature:esql.convert_warn +required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +207,7 @@ long:long |int:integer ; convertULToInt -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -239,7 +239,7 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_feature: esql.mv_warn +required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. @@ -254,7 +254,7 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +265,7 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded. @@ -281,7 +281,7 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +297,7 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int == 0] failed, treating result as null. Only first 20 failures recorded. 
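The `convert_warn` tests in this hunk all encode the same contract: a numeric conversion whose input overflows the target type evaluates to `null` and records a warning, rather than failing the query. A minimal illustrative sketch (hypothetical test, warning assertions elided):

convertOverflowSketch
required_capability: convert_warn

ROW l = 9223372036854775807 | EVAL i = TO_INTEGER(l);
// a real test would also assert the "evaluation of [TO_INTEGER(l)] failed, treating result as null" warning

l:long | i:integer
9223372036854775807 | null
;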
@@ -312,7 +312,7 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int] failed, treating result as null. Only first 20 failures recorded. @@ -325,7 +325,7 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int in (1, 7)] failed, treating result as null. Only first 20 failures recorded. @@ -338,7 +338,7 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded.] @@ -354,7 +354,7 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded.] @@ -370,7 +370,7 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int == 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int == 1] failed, treating result as null. Only first 20 failures recorded.] 
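Similarly, each `mv_warn` test above exercises one rule: comparing a multivalued field against a single value has no well-defined answer, so the expression evaluates to `null` for that row and a warning is recorded (with an `#[Emulated:...]` variant when the comparison is wrapped in `not(...)`). A minimal sketch (hypothetical data, warning assertions elided):

mvComparisonSketch
required_capability: mv_warn

ROW a = [1, 2] | EVAL gt = a > 1;

a:integer | gt:boolean
[1, 2] | null
;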
@@ -417,7 +417,7 @@ row a = [1, 2, 2, 3] | eval da = mv_dedupe(a); ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_sort[] ROW a = [4, 2, -3, 2] @@ -432,7 +432,7 @@ a:integer | sa:integer | sd:integer ; mvSortEmpInt -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change.int, "DESC"), sa = mv_sort(salary_change.int) @@ -454,7 +454,7 @@ emp_no:integer | salary_change.int:integer | sa:integer | sd:integer ; mvSortEmpLong -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change.long, "DESC"), sa = mv_sort(salary_change.long) @@ -476,7 +476,7 @@ emp_no:integer | salary_change.long:long | sa:long | sd:long ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_slice_positive[] row a = [1, 2, 2, 3] @@ -490,7 +490,7 @@ a:integer | a1:integer | a2:integer ; mvSliceNegativeOffset -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_slice_negative[] row a = [1, 2, 2, 3] @@ -504,7 +504,7 @@ a:integer | a1:integer | a2:integer ; mvSliceSingle -required_feature: esql.mv_sort +required_capability: mv_sort row a = 1 | eval a1 = mv_slice(a, 0); @@ -514,7 +514,7 @@ a:integer | a1:integer ; mvSliceOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 4), a2 = mv_slice(a, 2, 6), a3 = mv_slice(a, 4, 6); @@ -524,7 +524,7 @@ a:integer | a1:integer | a2:integer | a3:integer ; mvSliceEmpInt -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 0, 1) @@ -541,7 +541,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntSingle -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 1) @@ -558,7 +558,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntEndOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 1, 4) @@ -575,7 +575,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 2, 4) @@ -592,7 +592,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntStartOutOfBoundNegative -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, -5, -2) @@ -609,7 +609,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntOutOfBoundNegative -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, -5, -3) @@ -626,7 +626,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpLong -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.long, 0, 1) @@ -750,7 +750,7 @@ x:long ; valuesLong -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -762,7 +762,7 @@ required_feature: esql.agg_values ; valuesLongGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -783,7 +783,7 @@ required_feature: esql.agg_values ; valuesLongGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ 
-807,7 +807,7 @@ required_feature: esql.agg_values ; valuesInt -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -819,7 +819,7 @@ required_feature: esql.agg_values ; valuesIntGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -840,7 +840,7 @@ l:integer | first_letter:keyword ; valuesIntGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -864,7 +864,7 @@ required_feature: esql.agg_values ; valuesShort -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -876,7 +876,7 @@ required_feature: esql.agg_values ; valuesShortGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -897,7 +897,7 @@ l:integer | first_letter:keyword ; valuesShortGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 8d3c0c9186c6c..ae683acbb2c3a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warning:Line 1:38: evaluation of [ip0 == ip1] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +60,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warning:Line 1:43: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +73,7 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warning:Line 1:43: evaluation of [ip0 != ip1] failed, treating result as null. Only first 20 failures recorded. 
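The `agg_values` tests rely on `VALUES` collecting a group's values into a multivalued field whose order is not guaranteed, which is why the specs wrap it in `MV_SORT` to keep the expected output deterministic. A self-contained sketch (hypothetical data, not part of the diff):

valuesSketch
required_capability: agg_values

ROW x = ["c", "a", "b"] | MV_EXPAND x | STATS vals = MV_SORT(VALUES(x));

vals:keyword
["a", "b", "c"]
;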
@@ -125,7 +125,7 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +146,7 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +168,7 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +188,7 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. @@ -199,7 +199,7 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +213,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_feature: esql.mv_warn +required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +233,7 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +246,7 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -294,7 +294,7 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -308,7 +308,7 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true @@ -324,7 +324,7 @@ eth0 |fe80::cae2:65ff:fece:fec1 ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort FROM hosts | eval sd = mv_sort(ip1, "DESC"), sa = mv_sort(ip1) @@ -342,7 +342,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | where host == "epsilon" @@ -358,7 +358,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | where host == "epsilon" @@ -374,7 +374,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvZip -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | eval zip = mv_zip(to_string(description), to_string(ip0), "@@") @@ -392,7 +392,7 @@ epsilon | null | 
null ; values -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | STATS ip0=MV_SORT(VALUES(ip0)) @@ -403,7 +403,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | EVAL host=SUBSTRING(host, 0, 1) @@ -419,7 +419,7 @@ fe80::cae2:65ff:fece:feb9 | g ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | STATS ip0=MV_SORT(VALUES(ip0)) BY host @@ -434,7 +434,7 @@ fe80::cae2:65ff:fece:feb9 | gamma ; implictCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) == "127.0.0.1" | keep host, ip0 | sort host; host:keyword | ip0:ip @@ -445,7 +445,7 @@ beta | 127.0.0.1 ; implictCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) != "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -455,7 +455,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; implictCastingGreaterThan -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) > "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -465,7 +465,7 @@ gamma | fe80::cae2:65ff:fece:feb9 ; implictCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) <= "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -475,7 +475,7 @@ beta | 127.0.0.1 ; implictCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) in ( "127.0.0.1", "::1") | keep host, ip0 | sort host, ip0; host:keyword | ip0:ip diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index e0604acbcce1d..4e080bac0ed2e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -201,7 +201,7 @@ height:double | s:double ; powSalarySquared -required_feature: esql.pow_double +required_capability: pow_double from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; @@ -618,7 +618,7 @@ base:double | exponent:integer | result:double ; powIntInt -required_feature: esql.pow_double +required_capability: pow_double ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) @@ -629,7 +629,7 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_feature: esql.pow_double +required_capability: pow_double row s = 1 + pow(2, 2); @@ -645,7 +645,7 @@ s:double ; powIntUL -required_feature: esql.pow_double +required_capability: pow_double row x = pow(1, 9223372036854775808); @@ -654,7 +654,7 @@ x:double ; powLongUL -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(1) | eval x = pow(x, 9223372036854775808); @@ -663,7 +663,7 @@ x:double ; powUnsignedLongUL -required_feature: esql.pow_double +required_capability: pow_double row x = to_ul(1) | eval x = pow(x, 9223372036854775808); @@ -688,7 +688,7 @@ null ; powULInt 
-required_feature: esql.pow_double +required_capability: pow_double row x = pow(to_unsigned_long(9223372036854775807), 1); @@ -697,7 +697,7 @@ x:double ; powULIntOverrun -required_feature: esql.pow_double +required_capability: pow_double ROW x = POW(9223372036854775808, 2) ; @@ -719,7 +719,7 @@ x:double ; powULLong -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); @@ -728,7 +728,7 @@ x:double ; powULLongOverrun -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); @@ -1414,7 +1414,7 @@ Anneke |Preusig |1.56 |1.56 ; evalAbsString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW number = -1.0 | EVAL abs_number = ABS("10.0") @@ -1425,7 +1425,7 @@ number:double | abs_number:double ; functionUnderArithmeticOperationAggString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | eval x = date_trunc(1 month, "2024-11-22") + 2 days, y = x + 3 days @@ -1437,7 +1437,7 @@ count():long | y:date ; functionUnderArithmeticOperationString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | eval x = date_trunc(1 month, "2024-11-22") + 2 days, y = x + 3 days diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 1d523640731d7..bd52d3b26b336 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -28,8 +28,8 @@ double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" "keyword from_base64(string:keyword|text)" -"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"boolean|double|integer|ip|keyword|long|text|version greatest(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" +"boolean|double|integer|ip|keyword|long|text|version least(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" "integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" @@ -123,10 +123,10 @@ atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsign avg |number |"double|integer|long" |[""] bin |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] 
bucket |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] -case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] +case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |[A condition., The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches.] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate +coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate. concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. @@ -141,8 +141,8 @@ e |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. from_base64 |string |"keyword|text" |A base64 string. -greatest |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] -least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] +greatest |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. +least |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |String expression. If `null`, the function returns `null`. locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] @@ -180,12 +180,12 @@ sinh |angle |"double|integer|long|unsigne split |[string, delim] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., Delimiter. Only single byte delimiters are currently supported.] sqrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." 
st_centroid_ag|field |"geo_point|cartesian_point" |[""] -st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_x |point |"geo_point|cartesian_point" |[""] -st_y |point |"geo_point|cartesian_point" |[""] +st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_x |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. 
+st_y |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. starts_with |[str, prefix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] substring |[string, start, length] |["keyword|text", integer, integer] |[String expression. If `null`\, the function returns `null`., Start position., Length of the substring from the start position. Optional; if omitted\, all positions after `start` are returned.] sum |number |"double|integer|long" |[""] @@ -237,7 +237,7 @@ atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the avg |The average of a numeric field. bin |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. bucket |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. -case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. coalesce |Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. @@ -255,8 +255,8 @@ e |Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. ends_with |Returns a boolean that indicates whether a keyword string ends with another string. floor |Round a number down to the nearest integer. from_base64 |Decode a base64 string. -greatest |Returns the maximum value from many columns. -least |Returns the minimum value from many columns. +greatest |Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. +least |Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. locate |Returns an integer that indicates the position of a keyword substring within another string @@ -294,12 +294,12 @@ sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. The input can be any numeric value, the return value is always a double. Square roots of negative numbers and infinites are null. st_centroid_ag|The centroid of a spatial field. -st_contains |Returns whether the first geometry contains the second geometry. -st_disjoint |Returns whether the two geometries or geometry columns are disjoint. -st_intersects |Returns whether the two geometries or geometry columns intersect. -st_within |Returns whether the first geometry is within the second geometry. -st_x |Extracts the x-coordinate from a point geometry. 
-st_y |Extracts the y-coordinate from a point geometry. +st_contains |Returns whether the first geometry contains the second geometry. This is the inverse of the <<esql-st_within,ST_WITHIN>> function. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <<esql-st_intersects,ST_INTERSECTS>> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ +st_intersects |Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <<esql-st_disjoint,ST_DISJOINT>> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ +st_within |Returns whether the first geometry is within the second geometry. This is the inverse of the <<esql-st_contains,ST_CONTAINS>> function. +st_x |Extracts the `x` coordinate from the supplied point. If the point is of type `geo_point` this is equivalent to extracting the `longitude` value. +st_y |Extracts the `y` coordinate from the supplied point. If the point is of type `geo_point` this is equivalent to extracting the `latitude` value. starts_with |Returns a boolean that indicates whether a keyword string starts with another string. substring |Returns a substring of a string, specified by a start position and an optional length sum |The sum of a numeric field. @@ -370,8 +370,8 @@ e |double ends_with |boolean |[false, false] |false |false floor |"double|integer|long|unsigned_long" |false |false |false from_base64 |keyword |false |false |false -greatest |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false -least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false +greatest |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false +least |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false locate |integer |[false, false, true] |false |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index bcb9718048085..b4cd18f728858 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -1,5 +1,5 @@ simpleKeep -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -8,7 +8,7 @@ emp_no:integer |_index:keyword |_version:long ; aliasWithSameName -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -17,7 +17,7 @@ emp_no:integer |_index:keyword |_version:long ; inComparison -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | where _index == "employees" | where _version == 1 | keep emp_no | limit 2; emp_no:integer @@ -26,7 +26,7 @@ emp_no:integer ; metaIndexInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields // tag::metaIndexInAggs[] FROM employees METADATA _index, _id | STATS max = MAX(emp_no) BY _index @@ -40,7 +40,7 @@ max:integer |_index:keyword ; metaIndexAliasedInAggs -required_feature: esql.metadata_fields +required_capability:
metadata_fields from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i; @@ -49,7 +49,7 @@ max:integer |_i:keyword ; metaVersionInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | stats min = min(emp_no) by _version; min:integer |_version:long @@ -57,7 +57,7 @@ min:integer |_version:long ; metaVersionAliasedInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | eval _v = _version | stats min = min(emp_no) by _v; min:integer |_v:long @@ -65,7 +65,7 @@ min:integer |_v:long ; inAggsAndAsGroups -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | stats max = max(_version) by _index; max:long |_index:keyword @@ -73,7 +73,7 @@ max:long |_index:keyword ; inAggsAndAsGroupsAliased -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | eval _i = _index, _v = _version | stats max = max(_v) by _i; max:long |_i:keyword @@ -81,7 +81,7 @@ max:long |_i:keyword ; inFunction -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | where length(_index) == length("employees") | where abs(_version) == 1 | keep emp_no | limit 2; emp_no:integer @@ -90,7 +90,7 @@ emp_no:integer ; inArithmetics -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | eval i = _version + 2 | stats min = min(emp_no) by i; min:integer |i:long @@ -98,7 +98,7 @@ min:integer |i:long ; inSort -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort _version, _index, emp_no | keep emp_no, _version, _index | limit 2; emp_no:integer |_version:long |_index:keyword @@ -107,7 +107,7 @@ emp_no:integer |_version:long |_index:keyword ; withMvFunction -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | eval i = mv_avg(_version) + 2 | stats min = min(emp_no) by i; min:integer |i:double @@ -115,7 +115,7 @@ min:integer |i:double ; overwritten -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | eval _index = 3, _version = "version" | keep emp_no, _index, _version | limit 3; emp_no:integer |_index:integer |_version:keyword @@ -125,7 +125,7 @@ emp_no:integer |_index:integer |_version:keyword ; multipleIndices -required_feature: esql.metadata_fields +required_capability: metadata_fields // tag::multipleIndices[] FROM ul_logs, apps METADATA _index, _version | WHERE id IN (13, 14) AND _version == 1 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 26fcca423d28d..6d6b3b0782a98 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,7 @@ ############################################### convertFromStringQuantize -required_feature: esql.spatial_points +required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); @@ -13,7 +13,7 @@ POINT(42.97109629958868 14.7552534006536) |POINT(42.97109629958868 14.7552534006 ; convertFromString 
-required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-str[] ROW wkt = "POINT(42.97109630194 14.7552534413725)" @@ -28,7 +28,7 @@ wkt:keyword |pt:geo_point ; convertFromStringArray -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source row wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geopoint(wkt); @@ -38,7 +38,7 @@ wkt:keyword ; centroidFromStringNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg row wkt = "POINT(42.97109629958868 14.7552534006536)" | STATS c = ST_CENTROID_AGG(TO_GEOPOINT(wkt)); @@ -48,7 +48,7 @@ POINT(42.97109629958868 14.7552534006536) ; centroidFromString1 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt @@ -60,7 +60,7 @@ POINT(42.97109629958868 14.7552534006536) ; centroidFromString2 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt @@ -72,7 +72,7 @@ POINT(59.390193899162114 18.741501288022846) ; centroidFromString3 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt @@ -84,7 +84,7 @@ POINT(39.58327988510707 20.619513023697994) ; centroidFromString4 -required_feature: esql.st_x_y +required_capability: st_x_y ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt @@ -97,7 +97,7 @@ POINT(39.58327988510707 20.619513023697994) | 39.58327988510707 | 20.61951302369 ; stXFromString -required_feature: esql.st_x_y +required_capability: st_x_y // tag::st_x_y[] ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") @@ -112,7 +112,7 @@ POINT(42.97109629958868 14.7552534006536) | 42.97109629958868 | 14.755253400653 ; simpleLoad -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; @@ -132,7 +132,7 @@ ZAH | Zāhedān | POINT(60.8628 29.4964) | Iran ; stXFromAirportsSupportsNull -required_feature: esql.st_x_y +required_capability: st_x_y FROM airports | EVAL x = FLOOR(ABS(ST_X(city_location))/200), y = FLOOR(ABS(ST_Y(city_location))/100) @@ -149,7 +149,7 @@ c:long | x:double | y:double # Tests for ST_CENTROID on GEO_POINT type centroidFromAirports -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg // tag::st_centroid_agg-airports[] FROM airports @@ -164,7 +164,7 @@ POINT(-0.030548143003023033 24.37553649504829) ; centroidFromAirportsNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(TO_GEOPOINT(location)) @@ -175,7 +175,7 @@ POINT (-0.03054810272375508 24.37553651570554) ; centroidFromAirportsCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() @@ -186,7 +186,7 @@ POINT(-0.030548143003023033 24.37553649504829) | 891 ; centroidFromAirportsCountGrouped -required_feature: esql.st_centroid_agg 
+required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -205,7 +205,7 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -217,7 +217,7 @@ POINT(83.27726172452623 28.99289782286029) | 33 ; centroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -229,7 +229,7 @@ POINT (7.572387259169772 26.836561792945492) | 891 ; centroidFromAirportsCountCityLocations -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() @@ -240,7 +240,7 @@ POINT (1.3965610809060276 24.127649406297987) | 891 ; centroidFromAirportsCountGroupedCountry -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country @@ -269,7 +269,7 @@ POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; centroidFromAirportsFilteredCountry -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE country == "United States" @@ -281,7 +281,7 @@ POINT (-97.3333946136801 38.07953176370194) | 129 ; centroidFromAirportsCountGroupedCountryCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country @@ -293,7 +293,7 @@ POINT (17.55538044598613 18.185558743854063) | 891 ; centroidFromAirportsCountryCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS airports=ST_CENTROID_AGG(location), cities=ST_CENTROID_AGG(city_location), count=COUNT() @@ -304,7 +304,7 @@ POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.12 ; centroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -318,7 +318,7 @@ POINT(78.73736493755132 26.761841227998957) | 12 ; centroidFromAirportsAfterMvExpand -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | MV_EXPAND type @@ -330,7 +330,7 @@ POINT(2.121611400672094 24.559172889205755) | 933 ; centroidFromAirportsGroupedAfterMvExpand -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | MV_EXPAND type @@ -350,7 +350,7 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsGroupedAfterMvExpandFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -363,7 +363,7 @@ POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; centroidFromAirportsAfterMvExpandFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -376,7 +376,7 @@ POINT(83.16847535921261 28.79002037679311) | 40 ; centroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE country == "United Kingdom" @@ -388,7 +388,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterIntersectsPredicateCountryUK -required_feature: 
esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -400,7 +400,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterContainsPredicateCountryUK -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) @@ -412,7 +412,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterWithinPredicateCountryUK -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -424,7 +424,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE country == "United Kingdom" @@ -443,7 +443,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 | true ; centroidFromAirportsAfterIntersectsEvalExpression -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -461,7 +461,7 @@ POINT (0.04453958108176276 23.74658354606057) | 873 | false ; centroidFromAirportsAfterIntersectsPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -473,7 +473,7 @@ POINT (42.97109629958868 14.7552534006536) | 1 ; centroidFromAirportsAfterIntersectsCompoundPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" @@ -488,7 +488,7 @@ POINT (42.97109629958868 14.7552534006536) | 1 # Tests for ST_INTERSECTS on GEO_POINT type pointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects // tag::st_intersects-airports[] FROM airports @@ -503,7 +503,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; pointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -514,7 +514,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; literalPointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt 
= ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -528,7 +528,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -542,7 +542,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonOneRow -required_feature: esql.st_intersects +required_capability: st_intersects ROW intersects = ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -552,7 +552,7 @@ true ; cityInCityBoundary -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | EVAL in_city = ST_INTERSECTS(city_location, city_boundary) @@ -568,7 +568,7 @@ cardinality:k | in_city:boolean ; cityNotInCityBoundaryBiggest -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | WHERE NOT ST_INTERSECTS(city_location, city_boundary) @@ -583,7 +583,7 @@ SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.253 ; airportCityLocationPointIntersection -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) @@ -594,7 +594,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointIntersectionCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) @@ -609,7 +609,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for ST_DISJOINT on GEO_POINT type literalPolygonDisjointLiteralPoint -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -623,7 +623,7 @@ wkt:keyword | pt:geo_point ; literalPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -637,7 +637,7 @@ wkt:keyword | pt:geo_point ; literalPolygonDisjointLiteralPointOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -647,7 +647,7 @@ false ; literalPointDisjointLiteralPolygonOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_GEOPOINT("POINT(-1 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -657,7 +657,7 @@ true ; pointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports | WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) @@ -679,7 +679,7 @@ x:double | y:double | count:long ; airportCityLocationPointDisjointCentroid -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_mp | WHERE ST_DISJOINT(location, city_location) @@ -694,7 +694,7 @@ POINT (67.8581917192787 24.02956652920693) | POINT (67.81638333333332 24.0489999 # Tests for ST_CONTAINS on GEO_POINT type literalPolygonContainsLiteralPoint -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -708,7 +708,7 @@ wkt:keyword | pt:geo_point ; literalPointDoesNotContainLiteralPolygon -required_feature: 
esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -720,7 +720,7 @@ wkt:keyword | pt:geo_point ; literalPolygonContainsLiteralPointOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW contains = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -730,7 +730,7 @@ true ; literalPointDoesNotContainLiteralPolygonOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW contains = ST_CONTAINS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -740,7 +740,7 @@ false ; pointContainsLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -750,7 +750,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword ; pointContainedInLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -761,7 +761,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; airportCityLocationPointContains -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) @@ -772,7 +772,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointContainsCentroid -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) @@ -787,7 +787,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for ST_WITHIN on GEO_POINT type literalPolygonNotWithinLiteralPoint -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -799,7 +799,7 @@ wkt:keyword | pt:geo_point ; literalPointWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -813,7 +813,7 @@ wkt:keyword | pt:geo_point ; literalPolygonNotWithinLiteralPointOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW within = ST_WITHIN(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -823,7 +823,7 @@ false ; literalPointWithinLiteralPolygonOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW within = ST_WITHIN(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -833,7 +833,7 @@ true ; pointWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_within-airports[] FROM airports @@ -848,7 +848,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; airportCityLocationPointWithin -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) @@ -859,7 +859,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointWithinCentroid -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) @@ -874,7 +874,7 @@ POINT (0 
0) | POINT (0 0) | 1 # Tests for Equality and casting with GEO_POINT geoPointEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -891,7 +891,7 @@ wkt:keyword |pt:geo_point ; geoPointNotEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-not-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -908,7 +908,7 @@ wkt:keyword |pt:geo_point ; convertFromStringParseError -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-str-parse-error[] row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] @@ -936,7 +936,7 @@ wkt:keyword |pt:geo_point ############################################### convertCartesianFromString -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -953,7 +953,7 @@ wkt:keyword |pt:cartesian_point ; convertCartesianFromStringArray -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianpoint(wkt); @@ -963,7 +963,7 @@ wkt:keyword |pt:cartesian_point ; centroidCartesianFromStringNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg row wkt = "POINT(4297.10986328125 -1475.530029296875)" | STATS c = ST_CENTROID_AGG(TO_CARTESIANPOINT(wkt)); @@ -973,7 +973,7 @@ POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString1 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt @@ -985,7 +985,7 @@ POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString2 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt @@ -997,7 +997,7 @@ POINT(5939.02001953125 398.6199951171875) ; centroidFromCartesianString3 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt @@ -1009,7 +1009,7 @@ POINT(3949.163965353159 1078.2645465797348) ; stXFromCartesianString -required_feature: esql.st_x_y +required_capability: st_x_y ROW point = TO_CARTESIANPOINT("POINT(4297.10986328125 -1475.530029296875)") | EVAL x = ST_X(point), y = ST_Y(point) @@ -1020,7 +1020,7 @@ POINT(4297.10986328125 -1475.530029296875) | 4297.10986328125 | -1475.530029296 ; simpleCartesianLoad -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; @@ -1039,7 +1039,7 @@ ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l # Tests for ST_CENTROID on CARTESIAN_POINT type cartesianCentroidFromAirports -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location); 
@@ -1049,7 +1049,7 @@ POINT(-266681.67563861894 3053301.5120195406) ; cartesianCentroidFromAirportsNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(TO_CARTESIANPOINT(location)); @@ -1059,7 +1059,7 @@ POINT (-266681.66530554957 3053301.506061676) ; cartesianCentroidFromAirportsCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() @@ -1070,7 +1070,7 @@ POINT(-266681.67563861894 3053301.5120195406) | 849 ; cartesianCentroidFromAirportsCountGrouped -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -1089,7 +1089,7 @@ POINT(140136.12878224207 3081220.7881944445) | 63 | 2 ; cartesianCentroidFromAirportsFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | WHERE scalerank == 9 @@ -1101,7 +1101,7 @@ POINT(9289013.153846154 3615537.0533353365) | 26 ; cartesianCentroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | WHERE scalerank == 9 @@ -1115,7 +1115,7 @@ POINT(9003597.4375 3429344.0078125) | 8 ; cartesianCentroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -1130,7 +1130,7 @@ POINT (726480.0130685265 3359566.331716279) | 849 # Tests for ST_INTERSECTS on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterIntersectsPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1142,7 +1142,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1153,7 +1153,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1167,7 +1167,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointIntersectsPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1178,7 +1178,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointIntersectsPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1189,7 +1189,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointIntersectsMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 
7484813.53657096)")) @@ -1202,7 +1202,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointIntersectsLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1215,7 +1215,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointIntersectsMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1229,7 +1229,7 @@ ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | ; cartesianPointIntersectsPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1241,7 +1241,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1253,7 +1253,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsLiteralPolygonCount -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1268,7 +1268,7 @@ count:long # Tests for ST_DISJOINT on CARTESIAN_POINT type literalPolygonDisjointLiteralCartesianPoint -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1282,7 +1282,7 @@ wkt:keyword | pt:cartesian_point ; literalCartesianPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1296,7 +1296,7 @@ wkt:keyword | pt:cartesian_point ; literalPolygonDisjointLiteralCartesianPointOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_CARTESIANPOINT("POINT(0 0)")) ; @@ -1306,7 +1306,7 @@ false ; literalCartesianPointDisjointLiteralPolygonOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(-1 0)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -1316,7 +1316,7 @@ true ; cartesianPointDisjointLiteralPolygonCount -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1328,7 +1328,7 @@ count:long ; cartesianPointIntersectsDisjointLiteralPolygonCount -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1344,7 +1344,7 @@ false | true | 405 ; 
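// Every entry in this csv-spec file follows the same shape: a test name, the capability it
// needs, the query, then the expected table. A hedged, illustrative sketch (the test name
// `disjointSketch` is hypothetical and not part of the suite):
//
// disjointSketch
// required_capability: st_disjoint
//
// ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(10 10)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"))
// ;
//
// disjoint:boolean
// true
// ;
//
// The point lies outside the polygon, so ST_DISJOINT returns true, mirroring the one-row
// disjoint tests above.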
cartesianPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1365,7 +1365,7 @@ x:double | y:double | count:long ; cartesianPointDisjointEmptyGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("LINESTRING()")) @@ -1380,7 +1380,7 @@ count:long ; cartesianPointDisjointInvalidGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("Invalid Geometry")) @@ -1398,7 +1398,7 @@ count:long # Tests for ST_CONTAINS on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) @@ -1410,7 +1410,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) @@ -1421,7 +1421,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1435,7 +1435,7 @@ wkt:keyword | pt:cartesian_point ; cartesianCentroidFromAirportsAfterPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1447,7 +1447,7 @@ POINT (NaN NaN) | 0 ; cartesianPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1457,7 +1457,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; literalCartesianPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1469,7 +1469,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointContainsPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1480,7 +1480,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointContainsPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1491,7 +1491,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointContainsMultiPoint -required_feature: esql.st_intersects 
+required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1502,7 +1502,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1513,7 +1513,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1524,7 +1524,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1536,7 +1536,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointContainsPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1551,7 +1551,7 @@ POINT (4783520.5 1661010.0) | 1 # Tests for ST_WITHIN on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterWithinPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1563,7 +1563,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1574,7 +1574,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPointWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1588,7 +1588,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointWithinPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1599,7 +1599,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointWithinPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1610,7 +1610,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointWithinMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1623,7 +1623,7 @@ CPH | 
POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointWithinLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1636,7 +1636,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointWithinMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1650,7 +1650,7 @@ ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | ; cartesianPointWithinPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1662,7 +1662,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointWithinPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1677,7 +1677,7 @@ POINT (4783520.5 1661010.0) | 1 # Tests for Equality and casting with GEO_POINT cartesianPointEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -1694,7 +1694,7 @@ wkt:keyword |pt:cartesian_point ; cartesianPointNotEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-not-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -1711,7 +1711,7 @@ wkt:keyword |pt:cartesian_point ; convertCartesianFromStringParseError -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-str-parse-error[] row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 6d0d15c398986..dd092130c3406 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -3,7 +3,7 @@ # convertFromString -required_feature: esql.spatial_shapes +required_capability: spatial_shapes // tag::to_geoshape-str[] ROW wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" @@ -18,7 +18,7 @@ wkt:keyword | geom:geo_shape ; convertFromStringArray -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geoshape(wkt); @@ -28,7 +28,7 @@ wkt:keyword ; convertFromStringViaPoint -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = "POINT (30 10)" | EVAL point = TO_GEOPOINT(wkt) @@ -41,7 +41,7 @@ wkt:keyword | point:geo_point | shape:geo_shape # need to work out how to upload WKT simpleLoad -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM countries_bbox | WHERE id == "ISL"; 
@@ -50,7 +50,7 @@ ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) ; simpleLoadPointsAsShapes -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM airports | WHERE abbrev == "CPH" OR abbrev == "VLC" @@ -80,7 +80,7 @@ CPH | Københavns Kommune | POINT(12.5683 55.6761) | Copenhagen # Tests for ST_INTERSECTS with GEO_SHAPE pointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL location = TO_GEOSHAPE(location) @@ -93,7 +93,7 @@ HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen ; polygonIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) @@ -106,7 +106,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 ; pointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL location = TO_GEOSHAPE(location) @@ -119,7 +119,7 @@ HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen ; literalPointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -133,7 +133,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -147,7 +147,7 @@ wkt:keyword | pt:geo_point ; literalPointAsShapeIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -161,7 +161,7 @@ wkt:keyword | pt:geo_shape ; literalPointAsShapeIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -175,7 +175,7 @@ wkt:keyword | pt:geo_shape ; shapeIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM countries_bbox | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) @@ -189,7 +189,7 @@ LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) ; literalPolygonIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] | EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") @@ -204,7 +204,7 @@ wkt:keyword | shape:geo_shape ; literalPolygonIntersectsLiteralPolygonOneRow -required_feature: esql.st_intersects +required_capability: st_intersects ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) ; @@ -217,7 +217,7 @@ true # Tests for ST_DISJOINT with GEO_SHAPE polygonDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint // tag::st_disjoint-airport_city_boundaries[] FROM airport_city_boundaries @@ -238,7 +238,7 @@ ACA | General Juan N Alvarez Int'l | Acapulco de Juárez | Acapulco d # Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE 
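// The tests that follow lean on the duality between the two predicates: ST_CONTAINS(a, b)
// holds exactly when ST_WITHIN(b, a) holds. A hedged one-row sketch (the geometry literals
// are illustrative, not taken from the fixtures):
//
// ROW c = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))"), TO_GEOPOINT("POINT(1 1)")),
//     w = ST_WITHIN(TO_GEOPOINT("POINT(1 1)"), TO_GEOSHAPE("POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))"))
// ;
//
// c:boolean | w:boolean
// true      | true
// ;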
polygonContainsLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_contains-airport_city_boundaries[] FROM airport_city_boundaries @@ -255,7 +255,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 ; polygonWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_within-airport_city_boundaries[] FROM airport_city_boundaries @@ -275,7 +275,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 # Tests for Equality and casting with GEO_SHAPE geo_shapeEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -288,7 +288,7 @@ wkt:keyword |pt:geo_shape ; geo_shapeNotEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -301,7 +301,7 @@ wkt:keyword |pt:geo_shape ; convertFromStringParseError -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | mv_expand wkt @@ -323,7 +323,7 @@ wkt:keyword |pt:geo_shape # convertCartesianShapeFromString -required_feature: esql.spatial_shapes +required_capability: spatial_shapes // tag::to_cartesianshape-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))"] @@ -340,7 +340,7 @@ wkt:keyword |geom:cartesian_shape ; convertCartesianFromStringArray -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianshape(wkt); @@ -350,7 +350,7 @@ wkt:keyword ; convertCartesianFromStringViaPoint -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = "POINT (3010 -1010)" | EVAL point = TO_CARTESIANPOINT(wkt) @@ -363,7 +363,7 @@ wkt:keyword | point:cartesian_point | shape:cartesian_shape # need to work out how to upload WKT simpleCartesianShapeLoad -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM countries_bbox_web | WHERE id == "ISL"; @@ -372,7 +372,7 @@ ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9 ; simpleLoadCartesianPointsAsShapes -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM airports_web | WHERE abbrev == "CPH" OR abbrev == "VLC" @@ -389,7 +389,7 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart # Tests for ST_INTERSECTS with CARTESIAN_SHAPE cartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | EVAL location = TO_CARTESIANSHAPE(location) @@ -402,7 +402,7 @@ HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | ; literalCartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -416,7 +416,7 @@ wkt:keyword | pt:cartesian_shape ; cartesianShapeIntersectsPolygon -required_feature: esql.st_intersects +required_capability: 
st_intersects FROM countries_bbox_web | WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -430,7 +430,7 @@ LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117. ; literalCartesianPolygonIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] | MV_EXPAND wkt @@ -447,7 +447,7 @@ wkt:keyword | shape:ca # Tests for ST_DISJOINT with CARTESIAN_SHAPE cartesianPolygonDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM countries_bbox_web | WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -460,7 +460,7 @@ ZWE | Zimbabwe | BBOX (2809472.180051312, 3681512.6693309383, -176035 ; cartesianPolygonDisjointEmptyGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM countries_bbox_web | WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("LINESTRING()")) @@ -478,7 +478,7 @@ count:long # Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE cartesianShapeContainsPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM countries_bbox_web | WHERE ST_CONTAINS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -490,7 +490,7 @@ ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908 ; cartesianShapeWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM countries_bbox_web | WHERE ST_WITHIN(shape, TO_CARTESIANSHAPE("POLYGON((1800000 -2500000, 4300000 -2500000, 4300000 -6000000, 1800000 -6000000, 1800000 -2500000))")) @@ -507,7 +507,7 @@ LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117. 
# Tests for Equality and casting with CARTESIAN_SHAPE cartesianshapeEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -520,7 +520,7 @@ wkt:keyword |pt:cartesian_shape ; cartesianShapeNotEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -533,7 +533,7 @@ wkt:keyword |pt:cartesian_shape ; convertCartesianShapeFromStringParseError -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5bdf0bd963fee..6322746318230 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -71,7 +71,7 @@ emp_no:integer | last_name:keyword | gender:keyword | f_l:boolean ; stringCast -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | eval ss = substring("abcd", "2"), l = left("abcd", "2"), r = right("abcd", "2"); @@ -80,7 +80,7 @@ a:integer | ss:keyword | l:keyword | r:keyword ; stringCastEmp -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | eval ss = substring(first_name, "2") @@ -330,7 +330,7 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -522,7 +522,7 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded. @@ -535,7 +535,7 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded. @@ -552,7 +552,7 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded. @@ -564,7 +564,7 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions] failed, treating result as null. Only first 20 failures recorded. 
@@ -577,7 +577,7 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions in (\"Accountant\", \"Tech Lead\")] failed, treating result as null. Only first 20 failures recorded. @@ -590,7 +590,7 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions < \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded.] @@ -607,7 +607,7 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions > \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded.] @@ -620,7 +620,7 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions == \"Accountant\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded.] 
@@ -745,7 +745,7 @@ ROW a=[10, 9, 8] ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = ["Mon", "Tues", "Wed", "Thu", "Fri"] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); @@ -754,7 +754,7 @@ a:keyword | sa:keyword | sd:keyword ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(job_positions, "DESC"), sa = mv_sort(job_positions) @@ -772,7 +772,7 @@ emp_no:integer | job_positions:keyword ; mvSliceCast -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = ["1", "2", "3", "4"] | eval a1 = mv_slice(a, "0", "1"); @@ -782,7 +782,7 @@ a:keyword | a1:keyword ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.keyword, 0, 1) @@ -799,7 +799,7 @@ emp_no:integer | salary_change.keyword:keyword | a1:keyword ; mvZip -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_zip[] ROW a = ["x", "y", "z"], b = ["1", "2"] @@ -815,7 +815,7 @@ a:keyword | b:keyword | c:keyword ; mvZipEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval full_name = mv_zip(first_name, last_name, " "), full_name_2 = mv_zip(last_name, first_name), jobs = mv_zip(job_positions, salary_change.keyword, "#") @@ -842,7 +842,7 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -856,7 +856,7 @@ null | 19 ; startsWithText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -870,7 +870,7 @@ false | null ; substringOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -884,7 +884,7 @@ Gatew | null ; concatOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. 
@@ -1150,7 +1150,7 @@ a:keyword | upper:keyword | lower:keyword ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1162,7 +1162,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values // tag::values-grouped[] FROM employees @@ -1314,7 +1314,7 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true @@ -1328,7 +1328,7 @@ null | 0 ; base64Encode#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode // tag::to_base64[] row a = "elastic" @@ -1343,7 +1343,7 @@ elastic | ZWxhc3RpYw== ; base64Decode#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode // tag::from_base64[] row a = "ZWxhc3RpYw==" @@ -1358,7 +1358,7 @@ ZWxhc3RpYw== | elastic ; base64EncodeDecodeEmp#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode from employees | where emp_no < 10032 and emp_no > 10027 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index fa524d270bb98..38f3d439e7504 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,7 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +68,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterPushDownRange -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. 
@@ -84,7 +84,7 @@ warning:#[Emulated:Line 1:67: java.lang.IllegalArgumentException: single-value f ; filterPushDownIn -required_feature: esql.mv_warn +required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -98,7 +98,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterOnFieldsEquality -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. @@ -109,7 +109,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterOnFieldsInequality -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warning:Line 1:32: evaluation of [bytes_in < bytes_out] failed, treating result as null. Only first 20 failures recorded. @@ -140,7 +140,7 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warning:Line 1:27: evaluation of [bytes_in == to_ul(154551962150890564)] failed, treating result as null. Only first 20 failures recorded. @@ -151,7 +151,7 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc ; toDegrees -required_feature: esql.mv_warn +required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -163,7 +163,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; toRadians -required_feature: esql.mv_warn +required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index 513189cc0fe86..3b6c41f883018 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -312,7 +312,7 @@ null | null | null | 11 | 0 | 1.3.0 | 0.1 | no ; values -required_feature: esql.agg_values +required_capability: agg_values FROM apps | STATS version=MV_SORT(VALUES(version)) @@ -323,7 +323,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM apps | EVAL name=SUBSTRING(name, 0, 1) @@ -348,7 +348,7 @@ version:version | name:keyword ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM apps | STATS version=MV_SORT(VALUES(version)) BY name @@ -372,7 +372,7 @@ version:version | name:keyword ; implictCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version == "1.2.3.4" | sort name | keep name, version; name:keyword | version:version @@ -381,7 +381,7 @@ hhhhh | 1.2.3.4 ; implictCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended 
+required_capability: string_literal_auto_casting_extended from apps | where version != "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -390,7 +390,7 @@ bbbbb | 2.1 ; implictCastingGreaterThan -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version > "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -399,7 +399,7 @@ ccccc | 2.3.4 ; implictCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version <= "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -408,7 +408,7 @@ aaaaa | 1.2.3.4 ; implictCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version in ( "1.2.3.4", "bad" ) | sort name | keep name, version; name:keyword | version:version diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 1bc9bd4766c2e..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -121,7 +121,6 @@ public void testRow() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107347") public void testFromStatsGroupingAvgWithSort() { testFromStatsGroupingAvgImpl("from test | stats avg(count) by data | sort data | limit 2", "data", "avg(count)"); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index a1cd71da6c63b..d18bf0e23fd29 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -81,6 +81,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { @Before public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); + nodeLevelReduction = randomBoolean(); READ_DESCRIPTION = """ \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] \\_ValuesSourceReaderOperator[fields = [pause_me]] @@ -92,10 +93,10 @@ public void setup() { \\_ProjectOperator[projection = [0]] \\_LimitOperator[limit = 1000] \\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - nodeLevelReduction = randomBoolean(); + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? 
"\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + } public void testTaskContents() throws Exception { @@ -480,6 +481,37 @@ public void testTaskContentsForLimitQuery() throws Exception { } } + public void testTaskContentsForGroupingStatsQuery() throws Exception { + READ_DESCRIPTION = """ + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); + MERGE_DESCRIPTION = """ + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]"""; + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? "\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + + ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + scriptPermits.release(numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + var it = Iterators.flatMap(esqlResponse.values(), i -> i); + assertThat(it.next(), equalTo(numberOfDocs() - 1L)); // max of numberOfDocs() generated int values + assertThat(it.next(), equalTo(1L)); // pause_me always emits 1 + } + } + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java index 4bbcff44ec740..e005e2143522b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; import org.elasticsearch.cluster.coordination.LeaderChecker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; @@ -91,6 +93,21 @@ private EsqlQueryResponse runQueryWithDisruption(EsqlQueryRequest request) { try { return future.actionGet(2, TimeUnit.MINUTES); } catch (Exception e) { + logger.info( + "running tasks: {}", + client().admin() + .cluster() + .prepareListTasks() + .get() + .getTasks() + .stream() + .filter( + // Skip the tasks we that'd get in the way while debugging + t -> false == t.action().contains(TransportListTasksAction.TYPE.name()) + && false == t.action().contains(HealthNode.TASK_NAME) + ) + .toList() + ); assertTrue("request must be failed or completed after clearing 
disruption", future.isDone()); ensureBlocksReleased(); logger.info("--> failed to execute esql query with disruption; retrying...", e); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 406361438fc42..f82e554623085 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -8,9 +8,17 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; public class TimeSeriesIT extends AbstractEsqlIntegTestCase { @@ -37,6 +45,48 @@ public void testEmpty() { "type=long,time_series_metric=gauge" ) .get(); - run("FROM pods | LIMIT 1").close(); + run("METRICS pods | LIMIT 1").close(); + } + + public void testSimpleMetrics() { + Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("pod")).build(); + client().admin() + .indices() + .prepareCreate("pods") + .setSettings(settings) + .setMapping( + "@timestamp", + "type=date", + "pod", + "type=keyword,time_series_dimension=true", + "cpu", + "type=double,time_series_metric=gauge" + ) + .get(); + List pods = List.of("p1", "p2", "p3"); + long startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-04-15T00:00:00Z"); + int numDocs = between(10, 10); + Map> cpus = new HashMap<>(); + for (int i = 0; i < numDocs; i++) { + String pod = randomFrom(pods); + int cpu = randomIntBetween(0, 100); + cpus.computeIfAbsent(pod, k -> new ArrayList<>()).add(cpu); + long timestamp = startTime + (1000L * i); + client().prepareIndex("pods").setSource("@timestamp", timestamp, "pod", pod, "cpu", cpu).get(); + } + List sortedGroups = cpus.keySet().stream().sorted().toList(); + client().admin().indices().prepareRefresh("pods").get(); + try (EsqlQueryResponse resp = run("METRICS pods load=avg(cpu) BY pod | SORT pod")) { + List> rows = EsqlTestUtils.getValuesList(resp); + assertThat(rows, hasSize(sortedGroups.size())); + for (int i = 0; i < rows.size(); i++) { + List r = rows.get(i); + String pod = (String) r.get(1); + assertThat(pod, equalTo(sortedGroups.get(i))); + List values = cpus.get(pod); + double avg = values.stream().mapToDouble(n -> n).sum() / values.size(); + assertThat((double) r.get(0), equalTo(avg)); + } + } } } diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index c4a3dc7c56615..9f005db107aef 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -11,6 +11,7 @@ INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); META : 'meta' -> pushMode(META_MODE); +METRICS : 'metrics' -> pushMode(METRICS_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); @@ -31,6 +32,16 @@ 
MULTILINE_COMMENT WS : [ \r\n\t]+ -> channel(HIDDEN) ; + +fragment INDEX_UNQUOTED_IDENTIFIER_PART + : ~[=`|,[\]/ \t\r\n] + | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment + ; + +INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER_PART+ + ; + // // Explain // @@ -192,17 +203,8 @@ FROM_QUOTED_STRING : QUOTED_STRING -> type(QUOTED_STRING); OPTIONS : 'options'; METADATA : 'metadata'; -fragment FROM_UNQUOTED_IDENTIFIER_PART - : ~[=`|,[\]/ \t\r\n] - | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment - ; - -FROM_UNQUOTED_IDENTIFIER - : FROM_UNQUOTED_IDENTIFIER_PART+ - ; - -FROM_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) +FROM_INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER) ; FROM_LINE_COMMENT @@ -428,3 +430,60 @@ SETTING_WS : WS -> channel(HIDDEN) ; + +// +// METRICS command +// +mode METRICS_MODE; +METRICS_PIPE : PIPE -> type(PIPE), popMode; + +METRICS_INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER), popMode, pushMode(CLOSING_METRICS_MODE) + ; + +METRICS_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +METRICS_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +METRICS_WS + : WS -> channel(HIDDEN) + ; + +// TODO: remove this workaround mode - see https://github.com/elastic/elasticsearch/issues/108528 +mode CLOSING_METRICS_MODE; + +CLOSING_METRICS_COMMA + : COMMA -> type(COMMA), popMode, pushMode(METRICS_MODE) + ; + +CLOSING_METRICS_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +CLOSING_METRICS_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +CLOSING_METRICS_WS + : WS -> channel(HIDDEN) + ; + +CLOSING_METRICS_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> popMode, pushMode(EXPRESSION_MODE), type(QUOTED_IDENTIFIER) + ; + +CLOSING_METRICS_UNQUOTED_IDENTIFIER + :UNQUOTED_IDENTIFIER -> popMode, pushMode(EXPRESSION_MODE), type(UNQUOTED_IDENTIFIER) + ; + +CLOSING_METRICS_BY + :BY -> popMode, pushMode(EXPRESSION_MODE), type(BY) + ; + +CLOSING_METRICS_PIPE + : PIPE -> type(PIPE), popMode + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index b496aa68b61f7..15a8356d1b943 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -9,105 +9,112 @@ INLINESTATS=8 KEEP=9 LIMIT=10 META=11 -MV_EXPAND=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -LAST=40 -LP=41 -IN=42 -IS=43 -LIKE=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -OPTIONS=72 -METADATA=73 -FROM_UNQUOTED_IDENTIFIER=74 -FROM_LINE_COMMENT=75 -FROM_MULTILINE_COMMENT=76 -FROM_WS=77 -ID_PATTERN=78 -PROJECT_LINE_COMMENT=79 -PROJECT_MULTILINE_COMMENT=80 -PROJECT_WS=81 -AS=82 -RENAME_LINE_COMMENT=83 -RENAME_MULTILINE_COMMENT=84 -RENAME_WS=85 -ON=86 -WITH=87 -ENRICH_POLICY_NAME=88 
-ENRICH_LINE_COMMENT=89 -ENRICH_MULTILINE_COMMENT=90 -ENRICH_WS=91 -ENRICH_FIELD_LINE_COMMENT=92 -ENRICH_FIELD_MULTILINE_COMMENT=93 -ENRICH_FIELD_WS=94 -MVEXPAND_LINE_COMMENT=95 -MVEXPAND_MULTILINE_COMMENT=96 -MVEXPAND_WS=97 -INFO=98 -SHOW_LINE_COMMENT=99 -SHOW_MULTILINE_COMMENT=100 -SHOW_WS=101 -FUNCTIONS=102 -META_LINE_COMMENT=103 -META_MULTILINE_COMMENT=104 -META_WS=105 -COLON=106 -SETTING=107 -SETTING_LINE_COMMENT=108 -SETTTING_MULTILINE_COMMENT=109 -SETTING_WS=110 +METRICS=12 +MV_EXPAND=13 +RENAME=14 +ROW=15 +SHOW=16 +SORT=17 +STATS=18 +WHERE=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +INDEX_UNQUOTED_IDENTIFIER=24 +EXPLAIN_WS=25 +EXPLAIN_LINE_COMMENT=26 +EXPLAIN_MULTILINE_COMMENT=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COMMA=37 +DESC=38 +DOT=39 +FALSE=40 +FIRST=41 +LAST=42 +LP=43 +IN=44 +IS=45 +LIKE=46 +NOT=47 +NULL=48 +NULLS=49 +OR=50 +PARAM=51 +RLIKE=52 +RP=53 +TRUE=54 +EQ=55 +CIEQ=56 +NEQ=57 +LT=58 +LTE=59 +GT=60 +GTE=61 +PLUS=62 +MINUS=63 +ASTERISK=64 +SLASH=65 +PERCENT=66 +OPENING_BRACKET=67 +CLOSING_BRACKET=68 +UNQUOTED_IDENTIFIER=69 +QUOTED_IDENTIFIER=70 +EXPR_LINE_COMMENT=71 +EXPR_MULTILINE_COMMENT=72 +EXPR_WS=73 +OPTIONS=74 +METADATA=75 +FROM_LINE_COMMENT=76 +FROM_MULTILINE_COMMENT=77 +FROM_WS=78 +ID_PATTERN=79 +PROJECT_LINE_COMMENT=80 +PROJECT_MULTILINE_COMMENT=81 +PROJECT_WS=82 +AS=83 +RENAME_LINE_COMMENT=84 +RENAME_MULTILINE_COMMENT=85 +RENAME_WS=86 +ON=87 +WITH=88 +ENRICH_POLICY_NAME=89 +ENRICH_LINE_COMMENT=90 +ENRICH_MULTILINE_COMMENT=91 +ENRICH_WS=92 +ENRICH_FIELD_LINE_COMMENT=93 +ENRICH_FIELD_MULTILINE_COMMENT=94 +ENRICH_FIELD_WS=95 +MVEXPAND_LINE_COMMENT=96 +MVEXPAND_MULTILINE_COMMENT=97 +MVEXPAND_WS=98 +INFO=99 +SHOW_LINE_COMMENT=100 +SHOW_MULTILINE_COMMENT=101 +SHOW_WS=102 +FUNCTIONS=103 +META_LINE_COMMENT=104 +META_MULTILINE_COMMENT=105 +META_WS=106 +COLON=107 +SETTING=108 +SETTING_LINE_COMMENT=109 +SETTTING_MULTILINE_COMMENT=110 +SETTING_WS=111 +METRICS_LINE_COMMENT=112 +METRICS_MULTILINE_COMMENT=113 +METRICS_WS=114 +CLOSING_METRICS_LINE_COMMENT=115 +CLOSING_METRICS_MULTILINE_COMMENT=116 +CLOSING_METRICS_WS=117 'dissect'=1 'drop'=2 'enrich'=3 @@ -119,55 +126,56 @@ SETTING_WS=110 'keep'=9 'limit'=10 'meta'=11 -'mv_expand'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'last'=40 -'('=41 -'in'=42 -'is'=43 -'like'=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'options'=72 -'metadata'=73 -'as'=82 -'on'=86 -'with'=87 -'info'=98 -'functions'=102 -':'=106 +'metrics'=12 +'mv_expand'=13 +'rename'=14 +'row'=15 +'show'=16 +'sort'=17 +'stats'=18 +'where'=19 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +','=37 +'desc'=38 +'.'=39 +'false'=40 +'first'=41 +'last'=42 +'('=43 +'in'=44 +'is'=45 +'like'=46 +'not'=47 +'null'=48 +'nulls'=49 +'or'=50 +'?'=51 +'rlike'=52 +')'=53 +'true'=54 +'=='=55 +'=~'=56 +'!='=57 +'<'=58 +'<='=59 +'>'=60 +'>='=61 +'+'=62 +'-'=63 +'*'=64 +'/'=65 +'%'=66 +']'=68 +'options'=74 +'metadata'=75 +'as'=83 +'on'=87 +'with'=88 +'info'=99 +'functions'=103 +':'=107 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 62dcc6ebd484b..e023991b74187 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ 
b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -23,6 +23,7 @@ sourceCommand : explainCommand | fromCommand | rowCommand + | metricsCommand | showCommand | metaCommand ; @@ -104,12 +105,11 @@ field ; fromCommand - : FROM fromIdentifier (COMMA fromIdentifier)* metadata? fromOptions? + : FROM indexIdentifier (COMMA indexIdentifier)* metadata? fromOptions? ; -fromIdentifier - : FROM_UNQUOTED_IDENTIFIER - | QUOTED_IDENTIFIER +indexIdentifier + : INDEX_UNQUOTED_IDENTIFIER ; fromOptions @@ -126,13 +126,17 @@ metadata ; metadataOption - : METADATA fromIdentifier (COMMA fromIdentifier)* + : METADATA indexIdentifier (COMMA indexIdentifier)* ; deprecated_metadata : OPENING_BRACKET metadataOption CLOSING_BRACKET ; +metricsCommand + : METRICS indexIdentifier (COMMA indexIdentifier)* aggregates=fields? (BY grouping=fields)? + ; + evalCommand : EVAL fields ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index b496aa68b61f7..15a8356d1b943 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -9,105 +9,112 @@ INLINESTATS=8 KEEP=9 LIMIT=10 META=11 -MV_EXPAND=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -LAST=40 -LP=41 -IN=42 -IS=43 -LIKE=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -OPTIONS=72 -METADATA=73 -FROM_UNQUOTED_IDENTIFIER=74 -FROM_LINE_COMMENT=75 -FROM_MULTILINE_COMMENT=76 -FROM_WS=77 -ID_PATTERN=78 -PROJECT_LINE_COMMENT=79 -PROJECT_MULTILINE_COMMENT=80 -PROJECT_WS=81 -AS=82 -RENAME_LINE_COMMENT=83 -RENAME_MULTILINE_COMMENT=84 -RENAME_WS=85 -ON=86 -WITH=87 -ENRICH_POLICY_NAME=88 -ENRICH_LINE_COMMENT=89 -ENRICH_MULTILINE_COMMENT=90 -ENRICH_WS=91 -ENRICH_FIELD_LINE_COMMENT=92 -ENRICH_FIELD_MULTILINE_COMMENT=93 -ENRICH_FIELD_WS=94 -MVEXPAND_LINE_COMMENT=95 -MVEXPAND_MULTILINE_COMMENT=96 -MVEXPAND_WS=97 -INFO=98 -SHOW_LINE_COMMENT=99 -SHOW_MULTILINE_COMMENT=100 -SHOW_WS=101 -FUNCTIONS=102 -META_LINE_COMMENT=103 -META_MULTILINE_COMMENT=104 -META_WS=105 -COLON=106 -SETTING=107 -SETTING_LINE_COMMENT=108 -SETTTING_MULTILINE_COMMENT=109 -SETTING_WS=110 +METRICS=12 +MV_EXPAND=13 +RENAME=14 +ROW=15 +SHOW=16 +SORT=17 +STATS=18 +WHERE=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +INDEX_UNQUOTED_IDENTIFIER=24 +EXPLAIN_WS=25 +EXPLAIN_LINE_COMMENT=26 +EXPLAIN_MULTILINE_COMMENT=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COMMA=37 +DESC=38 +DOT=39 +FALSE=40 +FIRST=41 +LAST=42 +LP=43 +IN=44 +IS=45 +LIKE=46 +NOT=47 +NULL=48 +NULLS=49 +OR=50 +PARAM=51 +RLIKE=52 +RP=53 +TRUE=54 +EQ=55 +CIEQ=56 +NEQ=57 +LT=58 +LTE=59 +GT=60 +GTE=61 +PLUS=62 +MINUS=63 +ASTERISK=64 +SLASH=65 +PERCENT=66 +OPENING_BRACKET=67 +CLOSING_BRACKET=68 +UNQUOTED_IDENTIFIER=69 +QUOTED_IDENTIFIER=70 +EXPR_LINE_COMMENT=71 +EXPR_MULTILINE_COMMENT=72 +EXPR_WS=73 +OPTIONS=74 +METADATA=75 +FROM_LINE_COMMENT=76 
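Since both the aggregate list and the BY clause in the metricsCommand rule are optional, the command parses in three shapes. Illustrative queries, assuming an index named pods:

METRICS pods
METRICS pods max(cpu)
METRICS pods max(cpu) BY pod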
+FROM_MULTILINE_COMMENT=77 +FROM_WS=78 +ID_PATTERN=79 +PROJECT_LINE_COMMENT=80 +PROJECT_MULTILINE_COMMENT=81 +PROJECT_WS=82 +AS=83 +RENAME_LINE_COMMENT=84 +RENAME_MULTILINE_COMMENT=85 +RENAME_WS=86 +ON=87 +WITH=88 +ENRICH_POLICY_NAME=89 +ENRICH_LINE_COMMENT=90 +ENRICH_MULTILINE_COMMENT=91 +ENRICH_WS=92 +ENRICH_FIELD_LINE_COMMENT=93 +ENRICH_FIELD_MULTILINE_COMMENT=94 +ENRICH_FIELD_WS=95 +MVEXPAND_LINE_COMMENT=96 +MVEXPAND_MULTILINE_COMMENT=97 +MVEXPAND_WS=98 +INFO=99 +SHOW_LINE_COMMENT=100 +SHOW_MULTILINE_COMMENT=101 +SHOW_WS=102 +FUNCTIONS=103 +META_LINE_COMMENT=104 +META_MULTILINE_COMMENT=105 +META_WS=106 +COLON=107 +SETTING=108 +SETTING_LINE_COMMENT=109 +SETTTING_MULTILINE_COMMENT=110 +SETTING_WS=111 +METRICS_LINE_COMMENT=112 +METRICS_MULTILINE_COMMENT=113 +METRICS_WS=114 +CLOSING_METRICS_LINE_COMMENT=115 +CLOSING_METRICS_MULTILINE_COMMENT=116 +CLOSING_METRICS_WS=117 'dissect'=1 'drop'=2 'enrich'=3 @@ -119,55 +126,56 @@ SETTING_WS=110 'keep'=9 'limit'=10 'meta'=11 -'mv_expand'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'last'=40 -'('=41 -'in'=42 -'is'=43 -'like'=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'options'=72 -'metadata'=73 -'as'=82 -'on'=86 -'with'=87 -'info'=98 -'functions'=102 -':'=106 +'metrics'=12 +'mv_expand'=13 +'rename'=14 +'row'=15 +'show'=16 +'sort'=17 +'stats'=18 +'where'=19 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +','=37 +'desc'=38 +'.'=39 +'false'=40 +'first'=41 +'last'=42 +'('=43 +'in'=44 +'is'=45 +'like'=46 +'not'=47 +'null'=48 +'nulls'=49 +'or'=50 +'?'=51 +'rlike'=52 +')'=53 +'true'=54 +'=='=55 +'=~'=56 +'!='=57 +'<'=58 +'<='=59 +'>'=60 +'>='=61 +'+'=62 +'-'=63 +'*'=64 +'/'=65 +'%'=66 +']'=68 +'options'=74 +'metadata'=75 +'as'=83 +'on'=87 +'with'=88 +'info'=99 +'functions'=103 +':'=107 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java new file mode 100644 index 0000000000000..fa23466f54f83 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.xpack.esql.plugin.EsqlFeatures; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +/** + * A {@link Set} of "capabilities" supported by the {@link RestEsqlQueryAction} + * and {@link RestEsqlAsyncQueryAction} APIs. These are exposed over the + * {@link RestNodesCapabilitiesAction} and we use them to enable tests. + */ +public class EsqlCapabilities { + static final Set<String> CAPABILITIES = capabilities(); + + private static Set<String> capabilities() { + /* + * Add all of our cluster features without the leading "esql."
+ */ + List<String> caps = new ArrayList<>(); + for (NodeFeature feature : new EsqlFeatures().getFeatures()) { + caps.add(cap(feature)); + } + for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) { + caps.add(cap(feature)); + } + return Set.copyOf(caps); + } + + /** + * Convert a {@link NodeFeature} from {@link EsqlFeatures} into a + * capability. + */ + public static String cap(NodeFeature feature) { + assert feature.id().startsWith("esql."); + return feature.id().substring("esql.".length()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 49a0307a6599e..fdf39545a396b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -92,7 +92,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { boolean isRunning = false; boolean isAsync = false; Profile profile = null; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { asyncExecutionId = in.readOptionalString(); isRunning = in.readBoolean(); isAsync = in.readBoolean(); @@ -108,7 +108,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(asyncExecutionId); out.writeBoolean(isRunning); out.writeBoolean(isAsync); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 3f0289d49535a..ad47779fffbb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -39,6 +39,11 @@ public List<Route> routes() { return List.of(new Route(POST, "/_query/async")); } + @Override + public Set<String> supportedCapabilities() { + return EsqlCapabilities.CAPABILITIES; + } + @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { EsqlQueryRequest esqlRequest; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 97a7f8e0e9e7d..268966422ce56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -39,6 +39,11 @@ public List<Route> routes() { return List.of(new Route(POST, "/_query")); } + @Override + public Set<String> supportedCapabilities() { + return EsqlCapabilities.CAPABILITIES; + } + @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { EsqlQueryRequest esqlRequest; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java
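The capability strings published by supportedCapabilities() are simply the ESQL cluster-feature ids with the esql. prefix stripped by cap(), so a feature registered as esql.some_feature would surface as some_feature. A hypothetical probe through the capabilities endpoint that the EsqlCapabilities javadoc refers to (the capability name and the exact query parameters are assumptions for illustration, not taken from this change):

GET _capabilities?method=POST&path=/_query&capabilities=some_feature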
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index f00e69ddaabe4..1018a03762cce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -59,12 +60,28 @@ record Condition(Expression condition, Expression value) {} "unsigned_long", "version" }, description = """ - Accepts pairs of conditions and values. - The function returns the value that belongs to the first condition that evaluates to true.""" + Accepts pairs of conditions and values. The function returns the value that + belongs to the first condition that evaluates to `true`. + + If the number of arguments is odd, the last argument is the default value which + is returned when no condition matches. If the number of arguments is even, and + no condition matches, the function returns `null`.""", + examples = { + @Example(description = "Determine whether employees are monolingual, bilingual, or polyglot:", file = "docs", tag = "case"), + @Example( + description = "Calculate the total connection success rate based on log messages:", + file = "conditional", + tag = "docsCaseSuccessRate" + ), + @Example( + description = "Calculate an hourly error rate as a percentage of the total number of log messages:", + file = "conditional", + tag = "docsCaseHourlyErrorRate" + ) } ) public Case( Source source, - @Param(name = "condition", type = { "boolean" }) Expression first, + @Param(name = "condition", type = { "boolean" }, description = "A condition.") Expression first, @Param( name = "trueValue", type = { @@ -79,7 +96,9 @@ public Case( "long", "text", "unsigned_long", - "version" } + "version" }, + description = "The value that's returned when the corresponding condition is the first to evaluate to `true`. " + + "The default value is returned when no condition matches." 
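The expanded CASE documentation below is easiest to see in the odd-arity form, where the trailing argument acts as the default. A minimal sketch:

ROW x = 0
| EVAL label = CASE(x > 0, "positive", x < 0, "negative", "zero")

The five arguments form two condition/value pairs plus the default "zero"; drop the default and an unmatched row yields null instead, per the even-arity rule described in the new text.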
) List<Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 1794258402aed..b1c761a50d8be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,26 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the maximum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the maximum value from multiple columns. This is similar to <<esql-mv_max>>\n" + + "except it is intended to run on multiple columns at once.", + note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. " + + "When run on `boolean` columns this will return `true` if any values are `true`.", + examples = @Example(file = "math", tag = "greatest") ) public Greatest( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate."
+ ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List<Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 6b4208f7b3d85..8b68196af68a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,24 @@ public class Least extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the minimum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the minimum value from multiple columns. " + + "This is similar to <<esql-mv_min>> except it is intended to run on multiple columns at once.", + examples = @Example(file = "math", tag = "least") ) public Least( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." + ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List<Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 98dc0c7e83d93..8c39a29f67f95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -52,12 +52,12 @@ public Coalesce( @Param( name = "first", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Expression to evaluate" + description = "Expression to evaluate."
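As the rewritten docs above put it, GREATEST and LEAST run across multiple columns within one row, in contrast to the single-column multivalue functions MV_MAX and MV_MIN they are compared to. A minimal sketch:

ROW a = 10, b = 20
| EVAL g = GREATEST(a, b), l = LEAST(a, b)

This yields g = 20 and l = 10; on keyword or text columns GREATEST instead returns the last string in alphabetical order, per the new note.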
) Expression first, @Param( name = "rest", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Other expression to evaluate", + description = "Other expression to evaluate.", optional = true ) List<Expression> rest ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 279f31e34ac95..31e0a86a1e3ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -111,7 +111,9 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry contains the second geometry.", + description = """ + Returns whether the first geometry contains the second geometry. + This is the inverse of the <<esql-st_within,ST_WITHIN>> function.""", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( @@ -119,12 +121,16 @@ public SpatialContains( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java index 7833f93b6270f..7b85ebfea5ee2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -65,7 +65,10 @@ public class SpatialDisjoint extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns are disjoint.", + description = """ + Returns whether the two geometries or geometry columns are disjoint. + This is the inverse of the <<esql-st_intersects,ST_INTERSECTS>> function.
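COALESCE, whose parameter descriptions are touched up above, evaluates to the first of its arguments that is not null. A minimal sketch:

ROW a = null, b = "b"
| EVAL c = COALESCE(a, b)

so c is "b".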
+ In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅""", examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") ) public SpatialDisjoint( @@ -73,12 +76,16 @@ public SpatialDisjoint( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 810e3206ada73..462f3bce1aeea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -63,22 +63,27 @@ public class SpatialIntersects extends SpatialRelatesFunction { new CartesianShapeIndexer("ST_Intersects") ); - @FunctionInfo( - returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns intersect.", - examples = @Example(file = "spatial", tag = "st_intersects-airports") - ) + @FunctionInfo(returnType = { "boolean" }, description = """ + Returns true if two geometries intersect. + They intersect if they have any point in common, including their interior points + (points along lines or within polygons). + This is the inverse of the <<esql-st_disjoint,ST_DISJOINT>> function. + In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅""", examples = @Example(file = "spatial", tag = "st_intersects-airports")) public SpatialIntersects( Source source, @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters."
) Expression right ) { this(source, left, right, false, false); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index ca285ca07e27b..1eaf1e31e5430 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -66,7 +66,9 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry is within the second geometry.", + description = """ + Returns whether the first geometry is within the second geometry. + This is the inverse of the <<esql-st_contains,ST_CONTAINS>> function.""", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( @@ -74,12 +76,16 @@ public SpatialWithin( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java index f86be9290fed1..f5ff933babc9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_X.
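Read together, the reworded descriptions make the four spatial predicates two inverse pairs, ST_CONTAINS/ST_WITHIN and ST_INTERSECTS/ST_DISJOINT, and all four require both arguments to share a coordinate system (geo_* with geo_*, cartesian_* with cartesian_*). A sketch of the intersects case, assuming an airports index with a geo_point location field along the lines of the docs examples these annotations reference:

FROM airports
| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"))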
*/ public class StX extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the x-coordinate from a point geometry.") - public StX(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `x` coordinate from the supplied point.\n" + + "If the point is of type `geo_point` this is equivalent to extracting the `longitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StX( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java index 759c23c73374a..48de97da4befb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_Y. */ public class StY extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the y-coordinate from a point geometry.") - public StY(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `y` coordinate from the supplied point.\n" + + "If the point is of type `geo_point` this is equivalent to extracting the `latitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StY( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`."
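The new ST_X/ST_Y descriptions translate to queries like the following sketch; for a geo_point the x coordinate is the longitude and y the latitude:

ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)")
| EVAL x = ST_X(point), y = ST_Y(point)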
+ ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 0cfffb128e0cf..4a5748f26a07f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -540,7 +540,7 @@ static EnrichExec readEnrichExec(PlanStreamInput in) throws IOException { final String policyMatchField = in.readString(); final Map<String, String> concreteIndices; final Enrich.Mode mode; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { mode = in.readEnum(Enrich.Mode.class); concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); } else { @@ -573,7 +573,7 @@ static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOEx out.writeString(enrich.matchType()); } out.writeString(enrich.policyMatchField()); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(enrich.mode()); out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { @@ -824,19 +824,19 @@ static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { static Enrich readEnrich(PlanStreamInput in) throws IOException { Enrich.Mode mode = Enrich.Mode.ANY; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { mode = in.readEnum(Enrich.Mode.class); } final Source source = in.readSource(); final LogicalPlan child = in.readLogicalPlanNode(); final Expression policyName = in.readExpression(); final NamedExpression matchField = in.readNamedExpression(); - if (in.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readString(); // discard the old policy name } final EnrichPolicy policy = new EnrichPolicy(in); final Map<String, String> concreteIndices; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); } else { EsIndex esIndex = readEsIndex(in); @@ -849,7 +849,7 @@ static Enrich readEnrich(PlanStreamInput in) throws IOException { } static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(enrich.mode()); } @@ -857,11 +857,11 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException out.writeLogicalPlanNode(enrich.child()); out.writeExpression(enrich.policyName()); out.writeNamedExpression(enrich.matchField()); - if (out.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name } enrich.policy().writeTo(out); - if
(out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { Map<String, String> concreteIndices = enrich.concreteIndices(); @@ -1819,8 +1819,8 @@ static void writeLiteral(PlanStreamOutput out, Literal literal) throws IOExcepti */ private static Object mapFromLiteralValue(PlanStreamOutput out, DataType dataType, Object value) { if (dataType == GEO_POINT || dataType == CARTESIAN_POINT) { - // In 8.12.0 and earlier builds of 8.13 (pre-release) we serialized point literals as encoded longs, but now use WKB - if (out.getTransportVersion().before(TransportVersions.ESQL_PLAN_POINT_LITERAL_WKB)) { + // In 8.12.0 we serialized point literals as encoded longs, but now use WKB + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { if (value instanceof List<?> list) { return list.stream().map(v -> mapFromLiteralValue(out, dataType, v)).toList(); } @@ -1836,8 +1836,8 @@ private static Object mapToLiteralValue(PlanStreamInput in, DataType dataType, O */ private static Object mapToLiteralValue(PlanStreamInput in, DataType dataType, Object value) { if (dataType == GEO_POINT || dataType == CARTESIAN_POINT) { - // In 8.12.0 and earlier builds of 8.13 (pre-release) we serialized point literals as encoded longs, but now use WKB - if (in.getTransportVersion().before(TransportVersions.ESQL_PLAN_POINT_LITERAL_WKB)) { + // In 8.12.0 we serialized point literals as encoded longs, but now use WKB + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { if (value instanceof List<?> list) { return list.stream().map(v -> mapToLiteralValue(in, dataType, v)).toList(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 866093ef55a6c..899f745e50c3a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -11,6 +11,7 @@ null 'keep' 'limit' 'meta' +'metrics' 'mv_expand' 'rename' 'row' @@ -25,6 +26,7 @@ null null null null +null '|' null null @@ -80,7 +82,6 @@ null null null null -null 'as' null null @@ -110,6 +111,12 @@ null null null null +null +null +null +null +null +null token symbolic names: null @@ -124,6 +131,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -135,6 +143,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -186,7 +195,6 @@ EXPR_MULTILINE_COMMENT EXPR_WS OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -223,6 +231,12 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS rule names: DISSECT @@ -236,6 +250,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -247,6 +262,8 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER_PART +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_OPENING_BRACKET EXPLAIN_PIPE EXPLAIN_WS @@ -317,9 +334,7 @@ FROM_ASSIGN FROM_QUOTED_STRING OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER_PART -FROM_UNQUOTED_IDENTIFIER -FROM_QUOTED_IDENTIFIER
+FROM_INDEX_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -385,6 +400,19 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_PIPE +METRICS_INDEX_UNQUOTED_IDENTIFIER +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_COMMA +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS +CLOSING_METRICS_QUOTED_IDENTIFIER +CLOSING_METRICS_UNQUOTED_IDENTIFIER +CLOSING_METRICS_BY +CLOSING_METRICS_PIPE channel names: DEFAULT_TOKEN_CHANNEL @@ -403,6 +431,8 @@ MVEXPAND_MODE SHOW_MODE META_MODE SETTING_MODE +METRICS_MODE +CLOSING_METRICS_MODE atn: -[4, 0, 110, 1203, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 
8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 484, 8, 18, 11, 18, 12, 18, 485, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 494, 8, 19, 10, 19, 12, 19, 497, 9, 19, 1, 19, 3, 19, 500, 8, 19, 1, 19, 3, 19, 503, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 512, 8, 20, 10, 20, 12, 20, 515, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 523, 8, 21, 11, 21, 12, 21, 524, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 4, 32, 569, 8, 32, 11, 32, 12, 32, 570, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 580, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 592, 8, 38, 10, 38, 12, 38, 595, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 603, 8, 38, 10, 38, 12, 38, 606, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 613, 8, 38, 1, 38, 3, 38, 616, 8, 38, 3, 38, 618, 8, 38, 1, 39, 4, 39, 621, 8, 39, 11, 39, 12, 39, 622, 1, 40, 4, 40, 626, 8, 40, 11, 40, 12, 40, 627, 1, 40, 1, 40, 5, 40, 632, 8, 40, 10, 40, 12, 40, 635, 9, 40, 1, 40, 1, 40, 4, 40, 639, 8, 40, 11, 40, 12, 40, 640, 1, 40, 4, 40, 644, 8, 40, 11, 40, 12, 40, 645, 1, 40, 1, 40, 5, 40, 650, 8, 40, 10, 40, 12, 40, 653, 9, 40, 3, 40, 655, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 661, 8, 40, 11, 40, 12, 40, 662, 1, 40, 1, 40, 3, 40, 667, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 798, 8, 78, 10, 78, 12, 78, 801, 9, 78, 1, 78, 1, 78, 3, 78, 805, 8, 78, 1, 78, 4, 78, 808, 8, 78, 11, 78, 12, 78, 809, 3, 78, 812, 8, 78, 1, 79, 1, 79, 4, 79, 816, 8, 79, 11, 79, 12, 79, 817, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 
92, 881, 8, 92, 1, 93, 4, 93, 884, 8, 93, 11, 93, 12, 93, 885, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 3, 101, 921, 8, 101, 1, 102, 1, 102, 3, 102, 925, 8, 102, 1, 102, 5, 102, 928, 8, 102, 10, 102, 12, 102, 931, 9, 102, 1, 102, 1, 102, 3, 102, 935, 8, 102, 1, 102, 4, 102, 938, 8, 102, 11, 102, 12, 102, 939, 3, 102, 942, 8, 102, 1, 103, 1, 103, 4, 103, 946, 8, 103, 11, 103, 12, 103, 947, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 121, 4, 121, 1023, 8, 121, 11, 121, 12, 121, 1024, 1, 121, 1, 121, 3, 121, 1029, 8, 121, 1, 121, 4, 121, 1032, 8, 121, 11, 121, 12, 121, 1033, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 4, 156, 1188, 8, 156, 11, 156, 12, 156, 1189, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 2, 513, 604, 0, 160, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 0, 202, 75, 204, 76, 206, 77, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 78, 220, 
79, 222, 80, 224, 81, 226, 0, 228, 0, 230, 0, 232, 0, 234, 82, 236, 0, 238, 83, 240, 84, 242, 85, 244, 0, 246, 0, 248, 86, 250, 87, 252, 0, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 91, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 92, 282, 93, 284, 94, 286, 0, 288, 0, 290, 0, 292, 0, 294, 95, 296, 96, 298, 97, 300, 0, 302, 98, 304, 99, 306, 100, 308, 101, 310, 0, 312, 102, 314, 103, 316, 104, 318, 105, 320, 0, 322, 106, 324, 107, 326, 108, 328, 109, 330, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1230, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 
0, 0, 8, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 12, 332, 1, 0, 0, 0, 14, 342, 1, 0, 0, 0, 16, 349, 1, 0, 0, 0, 18, 358, 1, 0, 0, 0, 20, 365, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 382, 1, 0, 0, 0, 26, 389, 1, 0, 0, 0, 28, 403, 1, 0, 0, 0, 30, 410, 1, 0, 0, 0, 32, 418, 1, 0, 0, 0, 34, 425, 1, 0, 0, 0, 36, 437, 1, 0, 0, 0, 38, 446, 1, 0, 0, 0, 40, 452, 1, 0, 0, 0, 42, 459, 1, 0, 0, 0, 44, 466, 1, 0, 0, 0, 46, 474, 1, 0, 0, 0, 48, 483, 1, 0, 0, 0, 50, 489, 1, 0, 0, 0, 52, 506, 1, 0, 0, 0, 54, 522, 1, 0, 0, 0, 56, 528, 1, 0, 0, 0, 58, 533, 1, 0, 0, 0, 60, 538, 1, 0, 0, 0, 62, 542, 1, 0, 0, 0, 64, 546, 1, 0, 0, 0, 66, 550, 1, 0, 0, 0, 68, 554, 1, 0, 0, 0, 70, 556, 1, 0, 0, 0, 72, 558, 1, 0, 0, 0, 74, 561, 1, 0, 0, 0, 76, 563, 1, 0, 0, 0, 78, 572, 1, 0, 0, 0, 80, 574, 1, 0, 0, 0, 82, 579, 1, 0, 0, 0, 84, 581, 1, 0, 0, 0, 86, 586, 1, 0, 0, 0, 88, 617, 1, 0, 0, 0, 90, 620, 1, 0, 0, 0, 92, 666, 1, 0, 0, 0, 94, 668, 1, 0, 0, 0, 96, 671, 1, 0, 0, 0, 98, 675, 1, 0, 0, 0, 100, 679, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 684, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 691, 1, 0, 0, 0, 110, 693, 1, 0, 0, 0, 112, 699, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 710, 1, 0, 0, 0, 118, 712, 1, 0, 0, 0, 120, 715, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 723, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 732, 1, 0, 0, 0, 130, 738, 1, 0, 0, 0, 132, 741, 1, 0, 0, 0, 134, 743, 1, 0, 0, 0, 136, 749, 1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 756, 1, 0, 0, 0, 142, 759, 1, 0, 0, 0, 144, 762, 1, 0, 0, 0, 146, 765, 1, 0, 0, 0, 148, 767, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 775, 1, 0, 0, 0, 156, 777, 1, 0, 0, 0, 158, 779, 1, 0, 0, 0, 160, 781, 1, 0, 0, 0, 162, 783, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 790, 1, 0, 0, 0, 168, 811, 1, 0, 0, 0, 170, 813, 1, 0, 0, 0, 172, 821, 1, 0, 0, 0, 174, 823, 1, 0, 0, 0, 176, 827, 1, 0, 0, 0, 178, 831, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 840, 1, 0, 0, 0, 184, 844, 1, 0, 0, 0, 186, 848, 1, 0, 0, 0, 188, 852, 1, 0, 0, 0, 190, 856, 1, 0, 0, 0, 192, 860, 1, 0, 0, 0, 194, 868, 1, 0, 0, 0, 196, 880, 1, 0, 0, 0, 198, 883, 1, 0, 0, 0, 200, 887, 1, 0, 0, 0, 202, 891, 1, 0, 0, 0, 204, 895, 1, 0, 0, 0, 206, 899, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 908, 1, 0, 0, 0, 212, 912, 1, 0, 0, 0, 214, 920, 1, 0, 0, 0, 216, 941, 1, 0, 0, 0, 218, 945, 1, 0, 0, 0, 220, 949, 1, 0, 0, 0, 222, 953, 1, 0, 0, 0, 224, 957, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 966, 1, 0, 0, 0, 230, 970, 1, 0, 0, 0, 232, 974, 1, 0, 0, 0, 234, 978, 1, 0, 0, 0, 236, 981, 1, 0, 0, 0, 238, 985, 1, 0, 0, 0, 240, 989, 1, 0, 0, 0, 242, 993, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1012, 1, 0, 0, 0, 252, 1019, 1, 0, 0, 0, 254, 1028, 1, 0, 0, 0, 256, 1035, 1, 0, 0, 0, 258, 1039, 1, 0, 0, 0, 260, 1043, 1, 0, 0, 0, 262, 1047, 1, 0, 0, 0, 264, 1051, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1061, 1, 0, 0, 0, 270, 1065, 1, 0, 0, 0, 272, 1069, 1, 0, 0, 0, 274, 1073, 1, 0, 0, 0, 276, 1077, 1, 0, 0, 0, 278, 1081, 1, 0, 0, 0, 280, 1085, 1, 0, 0, 0, 282, 1089, 1, 0, 0, 0, 284, 1093, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1102, 1, 0, 0, 0, 290, 1106, 1, 0, 0, 0, 292, 1110, 1, 0, 0, 0, 294, 1114, 1, 0, 0, 0, 296, 1118, 1, 0, 0, 0, 298, 1122, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 
304, 1136, 1, 0, 0, 0, 306, 1140, 1, 0, 0, 0, 308, 1144, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1153, 1, 0, 0, 0, 314, 1163, 1, 0, 0, 0, 316, 1167, 1, 0, 0, 0, 318, 1171, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1180, 1, 0, 0, 0, 324, 1187, 1, 0, 0, 0, 326, 1191, 1, 0, 0, 0, 328, 1195, 1, 0, 0, 0, 330, 1199, 1, 0, 0, 0, 332, 333, 5, 100, 0, 0, 333, 334, 5, 105, 0, 0, 334, 335, 5, 115, 0, 0, 335, 336, 5, 115, 0, 0, 336, 337, 5, 101, 0, 0, 337, 338, 5, 99, 0, 0, 338, 339, 5, 116, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 6, 0, 0, 0, 341, 13, 1, 0, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 114, 0, 0, 344, 345, 5, 111, 0, 0, 345, 346, 5, 112, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 6, 1, 1, 0, 348, 15, 1, 0, 0, 0, 349, 350, 5, 101, 0, 0, 350, 351, 5, 110, 0, 0, 351, 352, 5, 114, 0, 0, 352, 353, 5, 105, 0, 0, 353, 354, 5, 99, 0, 0, 354, 355, 5, 104, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 2, 2, 0, 357, 17, 1, 0, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 118, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 108, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 3, 0, 0, 364, 19, 1, 0, 0, 0, 365, 366, 5, 101, 0, 0, 366, 367, 5, 120, 0, 0, 367, 368, 5, 112, 0, 0, 368, 369, 5, 108, 0, 0, 369, 370, 5, 97, 0, 0, 370, 371, 5, 105, 0, 0, 371, 372, 5, 110, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 6, 4, 3, 0, 374, 21, 1, 0, 0, 0, 375, 376, 5, 102, 0, 0, 376, 377, 5, 114, 0, 0, 377, 378, 5, 111, 0, 0, 378, 379, 5, 109, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 5, 4, 0, 381, 23, 1, 0, 0, 0, 382, 383, 5, 103, 0, 0, 383, 384, 5, 114, 0, 0, 384, 385, 5, 111, 0, 0, 385, 386, 5, 107, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 6, 0, 0, 388, 25, 1, 0, 0, 0, 389, 390, 5, 105, 0, 0, 390, 391, 5, 110, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 5, 105, 0, 0, 393, 394, 5, 110, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 115, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 97, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 7, 0, 0, 402, 27, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 1, 0, 0, 0, 408, 409, 6, 8, 1, 0, 409, 29, 1, 0, 0, 0, 410, 411, 5, 108, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 105, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 9, 0, 0, 417, 31, 1, 0, 0, 0, 418, 419, 5, 109, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 116, 0, 0, 421, 422, 5, 97, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 10, 5, 0, 424, 33, 1, 0, 0, 0, 425, 426, 5, 109, 0, 0, 426, 427, 5, 118, 0, 0, 427, 428, 5, 95, 0, 0, 428, 429, 5, 101, 0, 0, 429, 430, 5, 120, 0, 0, 430, 431, 5, 112, 0, 0, 431, 432, 5, 97, 0, 0, 432, 433, 5, 110, 0, 0, 433, 434, 5, 100, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 11, 6, 0, 436, 35, 1, 0, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 101, 0, 0, 439, 440, 5, 110, 0, 0, 440, 441, 5, 97, 0, 0, 441, 442, 5, 109, 0, 0, 442, 443, 5, 101, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 12, 7, 0, 445, 37, 1, 0, 0, 0, 446, 447, 5, 114, 0, 0, 447, 448, 5, 111, 0, 0, 448, 449, 5, 119, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 13, 0, 0, 451, 39, 1, 0, 0, 0, 452, 453, 5, 115, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 119, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 14, 8, 0, 458, 41, 1, 0, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 111, 0, 0, 461, 462, 5, 114, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 15, 0, 0, 465, 43, 1, 0, 0, 0, 466, 467, 5, 115, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 97, 0, 0, 469, 470, 5, 116, 0, 0, 470, 471, 5, 115, 0, 0, 471, 472, 1, 
0, 0, 0, 472, 473, 6, 16, 0, 0, 473, 45, 1, 0, 0, 0, 474, 475, 5, 119, 0, 0, 475, 476, 5, 104, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 17, 0, 0, 481, 47, 1, 0, 0, 0, 482, 484, 8, 0, 0, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 18, 0, 0, 488, 49, 1, 0, 0, 0, 489, 490, 5, 47, 0, 0, 490, 491, 5, 47, 0, 0, 491, 495, 1, 0, 0, 0, 492, 494, 8, 1, 0, 0, 493, 492, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 498, 500, 5, 13, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 502, 1, 0, 0, 0, 501, 503, 5, 10, 0, 0, 502, 501, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 19, 9, 0, 505, 51, 1, 0, 0, 0, 506, 507, 5, 47, 0, 0, 507, 508, 5, 42, 0, 0, 508, 513, 1, 0, 0, 0, 509, 512, 3, 52, 20, 0, 510, 512, 9, 0, 0, 0, 511, 509, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 5, 42, 0, 0, 517, 518, 5, 47, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 20, 9, 0, 520, 53, 1, 0, 0, 0, 521, 523, 7, 2, 0, 0, 522, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 21, 9, 0, 527, 55, 1, 0, 0, 0, 528, 529, 3, 164, 76, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 22, 10, 0, 531, 532, 6, 22, 11, 0, 532, 57, 1, 0, 0, 0, 533, 534, 3, 66, 27, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 23, 12, 0, 536, 537, 6, 23, 13, 0, 537, 59, 1, 0, 0, 0, 538, 539, 3, 54, 21, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 24, 9, 0, 541, 61, 1, 0, 0, 0, 542, 543, 3, 50, 19, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 25, 9, 0, 545, 63, 1, 0, 0, 0, 546, 547, 3, 52, 20, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 26, 9, 0, 549, 65, 1, 0, 0, 0, 550, 551, 5, 124, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 6, 27, 13, 0, 553, 67, 1, 0, 0, 0, 554, 555, 7, 3, 0, 0, 555, 69, 1, 0, 0, 0, 556, 557, 7, 4, 0, 0, 557, 71, 1, 0, 0, 0, 558, 559, 5, 92, 0, 0, 559, 560, 7, 5, 0, 0, 560, 73, 1, 0, 0, 0, 561, 562, 8, 6, 0, 0, 562, 75, 1, 0, 0, 0, 563, 565, 7, 7, 0, 0, 564, 566, 7, 8, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 568, 1, 0, 0, 0, 567, 569, 3, 68, 28, 0, 568, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 568, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 77, 1, 0, 0, 0, 572, 573, 5, 64, 0, 0, 573, 79, 1, 0, 0, 0, 574, 575, 5, 96, 0, 0, 575, 81, 1, 0, 0, 0, 576, 580, 8, 9, 0, 0, 577, 578, 5, 96, 0, 0, 578, 580, 5, 96, 0, 0, 579, 576, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 580, 83, 1, 0, 0, 0, 581, 582, 5, 95, 0, 0, 582, 85, 1, 0, 0, 0, 583, 587, 3, 70, 29, 0, 584, 587, 3, 68, 28, 0, 585, 587, 3, 84, 36, 0, 586, 583, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 585, 1, 0, 0, 0, 587, 87, 1, 0, 0, 0, 588, 593, 5, 34, 0, 0, 589, 592, 3, 72, 30, 0, 590, 592, 3, 74, 31, 0, 591, 589, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 596, 618, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 600, 5, 34, 0, 0, 600, 604, 1, 0, 0, 0, 601, 603, 8, 1, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 34, 0, 0, 608, 609, 5, 34, 0, 0, 609, 610, 5, 34, 0, 0, 610, 612, 1, 0, 0, 0, 611, 613, 5, 34, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 615, 1, 0, 0, 0, 614, 616, 5, 
34, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 588, 1, 0, 0, 0, 617, 597, 1, 0, 0, 0, 618, 89, 1, 0, 0, 0, 619, 621, 3, 68, 28, 0, 620, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 91, 1, 0, 0, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 633, 3, 108, 48, 0, 630, 632, 3, 68, 28, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 667, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 108, 48, 0, 637, 639, 3, 68, 28, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 667, 1, 0, 0, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 654, 1, 0, 0, 0, 647, 651, 3, 108, 48, 0, 648, 650, 3, 68, 28, 0, 649, 648, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 655, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 654, 647, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 3, 76, 32, 0, 657, 667, 1, 0, 0, 0, 658, 660, 3, 108, 48, 0, 659, 661, 3, 68, 28, 0, 660, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 665, 3, 76, 32, 0, 665, 667, 1, 0, 0, 0, 666, 625, 1, 0, 0, 0, 666, 636, 1, 0, 0, 0, 666, 643, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 93, 1, 0, 0, 0, 668, 669, 5, 98, 0, 0, 669, 670, 5, 121, 0, 0, 670, 95, 1, 0, 0, 0, 671, 672, 5, 97, 0, 0, 672, 673, 5, 110, 0, 0, 673, 674, 5, 100, 0, 0, 674, 97, 1, 0, 0, 0, 675, 676, 5, 97, 0, 0, 676, 677, 5, 115, 0, 0, 677, 678, 5, 99, 0, 0, 678, 99, 1, 0, 0, 0, 679, 680, 5, 61, 0, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 58, 0, 0, 682, 683, 5, 58, 0, 0, 683, 103, 1, 0, 0, 0, 684, 685, 5, 44, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 100, 0, 0, 687, 688, 5, 101, 0, 0, 688, 689, 5, 115, 0, 0, 689, 690, 5, 99, 0, 0, 690, 107, 1, 0, 0, 0, 691, 692, 5, 46, 0, 0, 692, 109, 1, 0, 0, 0, 693, 694, 5, 102, 0, 0, 694, 695, 5, 97, 0, 0, 695, 696, 5, 108, 0, 0, 696, 697, 5, 115, 0, 0, 697, 698, 5, 101, 0, 0, 698, 111, 1, 0, 0, 0, 699, 700, 5, 102, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 114, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 97, 0, 0, 707, 708, 5, 115, 0, 0, 708, 709, 5, 116, 0, 0, 709, 115, 1, 0, 0, 0, 710, 711, 5, 40, 0, 0, 711, 117, 1, 0, 0, 0, 712, 713, 5, 105, 0, 0, 713, 714, 5, 110, 0, 0, 714, 119, 1, 0, 0, 0, 715, 716, 5, 105, 0, 0, 716, 717, 5, 115, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 108, 0, 0, 719, 720, 5, 105, 0, 0, 720, 721, 5, 107, 0, 0, 721, 722, 5, 101, 0, 0, 722, 123, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 111, 0, 0, 725, 726, 5, 116, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 127, 1, 0, 0, 0, 732, 733, 5, 110, 0, 0, 733, 734, 5, 117, 0, 0, 734, 735, 5, 108, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 115, 0, 0, 737, 129, 1, 0, 0, 0, 738, 739, 5, 111, 0, 0, 739, 740, 5, 114, 0, 0, 740, 131, 1, 0, 0, 0, 741, 742, 5, 63, 0, 0, 742, 133, 1, 0, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 108, 0, 0, 745, 746, 5, 105, 0, 0, 746, 747, 5, 107, 0, 0, 747, 748, 5, 101, 0, 0, 748, 135, 1, 0, 0, 0, 749, 750, 5, 41, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 116, 0, 0, 752, 753, 5, 114, 0, 0, 753, 754, 5, 117, 0, 0, 754, 755, 5, 101, 0, 0, 755, 139, 1, 0, 0, 0, 756, 757, 5, 61, 
0, 0, 757, 758, 5, 61, 0, 0, 758, 141, 1, 0, 0, 0, 759, 760, 5, 61, 0, 0, 760, 761, 5, 126, 0, 0, 761, 143, 1, 0, 0, 0, 762, 763, 5, 33, 0, 0, 763, 764, 5, 61, 0, 0, 764, 145, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 147, 1, 0, 0, 0, 767, 768, 5, 60, 0, 0, 768, 769, 5, 61, 0, 0, 769, 149, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 5, 62, 0, 0, 773, 774, 5, 61, 0, 0, 774, 153, 1, 0, 0, 0, 775, 776, 5, 43, 0, 0, 776, 155, 1, 0, 0, 0, 777, 778, 5, 45, 0, 0, 778, 157, 1, 0, 0, 0, 779, 780, 5, 42, 0, 0, 780, 159, 1, 0, 0, 0, 781, 782, 5, 47, 0, 0, 782, 161, 1, 0, 0, 0, 783, 784, 5, 37, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 91, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 0, 0, 788, 789, 6, 76, 0, 0, 789, 165, 1, 0, 0, 0, 790, 791, 5, 93, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 77, 13, 0, 793, 794, 6, 77, 13, 0, 794, 167, 1, 0, 0, 0, 795, 799, 3, 70, 29, 0, 796, 798, 3, 86, 37, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 812, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 805, 3, 84, 36, 0, 803, 805, 3, 78, 33, 0, 804, 802, 1, 0, 0, 0, 804, 803, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 808, 3, 86, 37, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 812, 1, 0, 0, 0, 811, 795, 1, 0, 0, 0, 811, 804, 1, 0, 0, 0, 812, 169, 1, 0, 0, 0, 813, 815, 3, 80, 34, 0, 814, 816, 3, 82, 35, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 3, 80, 34, 0, 820, 171, 1, 0, 0, 0, 821, 822, 3, 170, 79, 0, 822, 173, 1, 0, 0, 0, 823, 824, 3, 50, 19, 0, 824, 825, 1, 0, 0, 0, 825, 826, 6, 81, 9, 0, 826, 175, 1, 0, 0, 0, 827, 828, 3, 52, 20, 0, 828, 829, 1, 0, 0, 0, 829, 830, 6, 82, 9, 0, 830, 177, 1, 0, 0, 0, 831, 832, 3, 54, 21, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 83, 9, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 66, 27, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 12, 0, 838, 839, 6, 84, 13, 0, 839, 181, 1, 0, 0, 0, 840, 841, 3, 164, 76, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 85, 10, 0, 843, 183, 1, 0, 0, 0, 844, 845, 3, 166, 77, 0, 845, 846, 1, 0, 0, 0, 846, 847, 6, 86, 14, 0, 847, 185, 1, 0, 0, 0, 848, 849, 3, 104, 46, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 87, 15, 0, 851, 187, 1, 0, 0, 0, 852, 853, 3, 100, 44, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 88, 16, 0, 855, 189, 1, 0, 0, 0, 856, 857, 3, 88, 38, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 89, 17, 0, 859, 191, 1, 0, 0, 0, 860, 861, 5, 111, 0, 0, 861, 862, 5, 112, 0, 0, 862, 863, 5, 116, 0, 0, 863, 864, 5, 105, 0, 0, 864, 865, 5, 111, 0, 0, 865, 866, 5, 110, 0, 0, 866, 867, 5, 115, 0, 0, 867, 193, 1, 0, 0, 0, 868, 869, 5, 109, 0, 0, 869, 870, 5, 101, 0, 0, 870, 871, 5, 116, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 100, 0, 0, 873, 874, 5, 97, 0, 0, 874, 875, 5, 116, 0, 0, 875, 876, 5, 97, 0, 0, 876, 195, 1, 0, 0, 0, 877, 881, 8, 10, 0, 0, 878, 879, 5, 47, 0, 0, 879, 881, 8, 11, 0, 0, 880, 877, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 884, 3, 196, 92, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 199, 1, 0, 0, 0, 887, 888, 3, 172, 80, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 94, 18, 0, 890, 201, 1, 0, 0, 0, 891, 892, 3, 50, 19, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 95, 9, 0, 894, 203, 1, 0, 0, 0, 895, 896, 3, 52, 20, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 96, 9, 0, 898, 205, 1, 0, 0, 0, 899, 900, 3, 54, 21, 0, 900, 901, 1, 0, 0, 0, 901, 902, 6, 97, 9, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 66, 27, 0, 904, 905, 1, 0, 0, 0, 905, 
906, 6, 98, 12, 0, 906, 907, 6, 98, 13, 0, 907, 209, 1, 0, 0, 0, 908, 909, 3, 108, 48, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 19, 0, 911, 211, 1, 0, 0, 0, 912, 913, 3, 104, 46, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 15, 0, 915, 213, 1, 0, 0, 0, 916, 921, 3, 70, 29, 0, 917, 921, 3, 68, 28, 0, 918, 921, 3, 84, 36, 0, 919, 921, 3, 158, 73, 0, 920, 916, 1, 0, 0, 0, 920, 917, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 919, 1, 0, 0, 0, 921, 215, 1, 0, 0, 0, 922, 925, 3, 70, 29, 0, 923, 925, 3, 158, 73, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 929, 1, 0, 0, 0, 926, 928, 3, 214, 101, 0, 927, 926, 1, 0, 0, 0, 928, 931, 1, 0, 0, 0, 929, 927, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, 942, 1, 0, 0, 0, 931, 929, 1, 0, 0, 0, 932, 935, 3, 84, 36, 0, 933, 935, 3, 78, 33, 0, 934, 932, 1, 0, 0, 0, 934, 933, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 938, 3, 214, 101, 0, 937, 936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 942, 1, 0, 0, 0, 941, 924, 1, 0, 0, 0, 941, 934, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 946, 3, 216, 102, 0, 944, 946, 3, 170, 79, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 219, 1, 0, 0, 0, 949, 950, 3, 50, 19, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 104, 9, 0, 952, 221, 1, 0, 0, 0, 953, 954, 3, 52, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 105, 9, 0, 956, 223, 1, 0, 0, 0, 957, 958, 3, 54, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 106, 9, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 66, 27, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 12, 0, 964, 965, 6, 107, 13, 0, 965, 227, 1, 0, 0, 0, 966, 967, 3, 100, 44, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 108, 16, 0, 969, 229, 1, 0, 0, 0, 970, 971, 3, 104, 46, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 109, 15, 0, 973, 231, 1, 0, 0, 0, 974, 975, 3, 108, 48, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 110, 19, 0, 977, 233, 1, 0, 0, 0, 978, 979, 5, 97, 0, 0, 979, 980, 5, 115, 0, 0, 980, 235, 1, 0, 0, 0, 981, 982, 3, 218, 103, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 112, 20, 0, 984, 237, 1, 0, 0, 0, 985, 986, 3, 50, 19, 0, 986, 987, 1, 0, 0, 0, 987, 988, 6, 113, 9, 0, 988, 239, 1, 0, 0, 0, 989, 990, 3, 52, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 114, 9, 0, 992, 241, 1, 0, 0, 0, 993, 994, 3, 54, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 115, 9, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 66, 27, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 12, 0, 1000, 1001, 6, 116, 13, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 3, 164, 76, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 10, 0, 1005, 1006, 6, 117, 21, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 111, 0, 0, 1008, 1009, 5, 110, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 118, 22, 0, 1011, 249, 1, 0, 0, 0, 1012, 1013, 5, 119, 0, 0, 1013, 1014, 5, 105, 0, 0, 1014, 1015, 5, 116, 0, 0, 1015, 1016, 5, 104, 0, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 119, 22, 0, 1018, 251, 1, 0, 0, 0, 1019, 1020, 8, 12, 0, 0, 1020, 253, 1, 0, 0, 0, 1021, 1023, 3, 252, 120, 0, 1022, 1021, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 3, 322, 155, 0, 1027, 1029, 1, 0, 0, 0, 1028, 1022, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 1032, 3, 252, 120, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1031, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 255, 1, 0, 0, 0, 1035, 1036, 3, 172, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 122, 18, 0, 1038, 257, 1, 0, 0, 0, 1039, 1040, 3, 254, 121, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 123, 23, 0, 1042, 
259, 1, 0, 0, 0, 1043, 1044, 3, 50, 19, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 124, 9, 0, 1046, 261, 1, 0, 0, 0, 1047, 1048, 3, 52, 20, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 125, 9, 0, 1050, 263, 1, 0, 0, 0, 1051, 1052, 3, 54, 21, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 126, 9, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 66, 27, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 12, 0, 1058, 1059, 6, 127, 13, 0, 1059, 1060, 6, 127, 13, 0, 1060, 267, 1, 0, 0, 0, 1061, 1062, 3, 100, 44, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 128, 16, 0, 1064, 269, 1, 0, 0, 0, 1065, 1066, 3, 104, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 129, 15, 0, 1068, 271, 1, 0, 0, 0, 1069, 1070, 3, 108, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 130, 19, 0, 1072, 273, 1, 0, 0, 0, 1073, 1074, 3, 250, 119, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 131, 24, 0, 1076, 275, 1, 0, 0, 0, 1077, 1078, 3, 218, 103, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 132, 20, 0, 1080, 277, 1, 0, 0, 0, 1081, 1082, 3, 172, 80, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 133, 18, 0, 1084, 279, 1, 0, 0, 0, 1085, 1086, 3, 50, 19, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 134, 9, 0, 1088, 281, 1, 0, 0, 0, 1089, 1090, 3, 52, 20, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 135, 9, 0, 1092, 283, 1, 0, 0, 0, 1093, 1094, 3, 54, 21, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 136, 9, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 66, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 12, 0, 1100, 1101, 6, 137, 13, 0, 1101, 287, 1, 0, 0, 0, 1102, 1103, 3, 108, 48, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 138, 19, 0, 1105, 289, 1, 0, 0, 0, 1106, 1107, 3, 172, 80, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 139, 18, 0, 1109, 291, 1, 0, 0, 0, 1110, 1111, 3, 168, 78, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 140, 25, 0, 1113, 293, 1, 0, 0, 0, 1114, 1115, 3, 50, 19, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 141, 9, 0, 1117, 295, 1, 0, 0, 0, 1118, 1119, 3, 52, 20, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 142, 9, 0, 1121, 297, 1, 0, 0, 0, 1122, 1123, 3, 54, 21, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 143, 9, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 3, 66, 27, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 144, 12, 0, 1129, 1130, 6, 144, 13, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 5, 105, 0, 0, 1132, 1133, 5, 110, 0, 0, 1133, 1134, 5, 102, 0, 0, 1134, 1135, 5, 111, 0, 0, 1135, 303, 1, 0, 0, 0, 1136, 1137, 3, 50, 19, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 146, 9, 0, 1139, 305, 1, 0, 0, 0, 1140, 1141, 3, 52, 20, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 6, 147, 9, 0, 1143, 307, 1, 0, 0, 0, 1144, 1145, 3, 54, 21, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 148, 9, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 3, 66, 27, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 149, 12, 0, 1151, 1152, 6, 149, 13, 0, 1152, 311, 1, 0, 0, 0, 1153, 1154, 5, 102, 0, 0, 1154, 1155, 5, 117, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 99, 0, 0, 1157, 1158, 5, 116, 0, 0, 1158, 1159, 5, 105, 0, 0, 1159, 1160, 5, 111, 0, 0, 1160, 1161, 5, 110, 0, 0, 1161, 1162, 5, 115, 0, 0, 1162, 313, 1, 0, 0, 0, 1163, 1164, 3, 50, 19, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 6, 151, 9, 0, 1166, 315, 1, 0, 0, 0, 1167, 1168, 3, 52, 20, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 152, 9, 0, 1170, 317, 1, 0, 0, 0, 1171, 1172, 3, 54, 21, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 153, 9, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 3, 166, 77, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 154, 14, 0, 1178, 1179, 6, 154, 13, 0, 1179, 321, 1, 0, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 323, 1, 0, 0, 0, 1182, 1188, 3, 78, 33, 0, 1183, 1188, 3, 68, 
28, 0, 1184, 1188, 3, 108, 48, 0, 1185, 1188, 3, 70, 29, 0, 1186, 1188, 3, 84, 36, 0, 1187, 1182, 1, 0, 0, 0, 1187, 1183, 1, 0, 0, 0, 1187, 1184, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1189, 1190, 1, 0, 0, 0, 1190, 325, 1, 0, 0, 0, 1191, 1192, 3, 50, 19, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 157, 9, 0, 1194, 327, 1, 0, 0, 0, 1195, 1196, 3, 52, 20, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 158, 9, 0, 1198, 329, 1, 0, 0, 0, 1199, 1200, 3, 54, 21, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 159, 9, 0, 1202, 331, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 485, 495, 499, 502, 511, 513, 524, 565, 570, 579, 586, 591, 593, 604, 612, 615, 617, 622, 627, 633, 640, 645, 651, 654, 662, 666, 799, 804, 809, 811, 817, 880, 885, 920, 924, 929, 934, 939, 941, 945, 947, 1024, 1028, 1033, 1187, 1189, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 68, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0]
\ No newline at end of file
+[4, 0, 117, 1307, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2,
156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 524, 8, 19, 11, 19, 12, 19, 525, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 534, 8, 20, 10, 20, 12, 20, 537, 9, 20, 1, 20, 3, 20, 540, 8, 20, 1, 20, 3, 20, 543, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 552, 8, 21, 10, 21, 12, 21, 555, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 563, 8, 22, 11, 22, 12, 22, 564, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 3, 23, 572, 8, 23, 1, 24, 4, 24, 575, 8, 24, 11, 24, 12, 24, 576, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 3, 35, 616, 8, 35, 1, 35, 4, 35, 619, 8, 35, 11, 35, 12, 35, 620, 1, 36, 1, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 630, 8, 38, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 3, 40, 637, 8, 40, 1, 41, 1, 41, 1, 41, 5, 41, 642, 8, 41, 10, 41, 12, 41, 645, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 653, 8, 41, 10, 41, 12, 41, 656, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 663, 8, 41, 1, 41, 3, 41, 666, 8, 41, 3, 41, 668, 8, 41, 1, 42, 4, 42, 671, 8, 42, 11, 42, 12, 42, 672, 1, 43, 4, 43, 676, 8, 43, 11, 43, 12, 43, 677, 1, 43, 1, 43, 5, 43, 682, 8, 43, 10, 43, 12, 43, 685, 9, 43, 1, 43, 1, 43, 4, 43, 689, 8, 43, 11, 43, 12, 43, 690, 1, 43, 4, 43, 694, 8, 43, 11, 43, 12, 43, 695, 1, 43, 1, 43, 5, 43, 700, 8, 43, 10, 43, 12, 43, 703, 9, 43, 3, 43, 705, 8, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 711, 8, 43, 11, 43, 12, 43, 712, 1, 43, 1, 43, 3, 43, 717, 8, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 
67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 5, 81, 848, 8, 81, 10, 81, 12, 81, 851, 9, 81, 1, 81, 1, 81, 3, 81, 855, 8, 81, 1, 81, 4, 81, 858, 8, 81, 11, 81, 12, 81, 859, 3, 81, 862, 8, 81, 1, 82, 1, 82, 4, 82, 866, 8, 82, 11, 82, 12, 82, 867, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 3, 102, 961, 8, 102, 1, 103, 1, 103, 3, 103, 965, 8, 103, 1, 103, 5, 103, 968, 8, 103, 10, 103, 12, 103, 971, 9, 103, 1, 103, 1, 103, 3, 103, 975, 8, 103, 1, 103, 4, 103, 978, 8, 103, 11, 103, 12, 103, 979, 3, 103, 982, 8, 103, 1, 104, 1, 104, 4, 104, 986, 8, 104, 11, 104, 12, 104, 987, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 122, 4, 122, 1063, 8, 122, 11, 122, 12, 122, 1064, 1, 122, 1, 122, 3, 122, 1069, 8, 122, 1, 122, 4, 122, 1072, 8, 122, 11, 122, 12, 122, 1073, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 4, 157, 1228, 8, 157, 11, 157, 12, 157, 1229, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 
162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 2, 553, 654, 0, 174, 14, 1, 16, 2, 18, 3, 20, 4, 22, 5, 24, 6, 26, 7, 28, 8, 30, 9, 32, 10, 34, 11, 36, 12, 38, 13, 40, 14, 42, 15, 44, 16, 46, 17, 48, 18, 50, 19, 52, 20, 54, 21, 56, 22, 58, 23, 60, 0, 62, 24, 64, 0, 66, 0, 68, 25, 70, 26, 72, 27, 74, 28, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 29, 98, 30, 100, 31, 102, 32, 104, 33, 106, 34, 108, 35, 110, 36, 112, 37, 114, 38, 116, 39, 118, 40, 120, 41, 122, 42, 124, 43, 126, 44, 128, 45, 130, 46, 132, 47, 134, 48, 136, 49, 138, 50, 140, 51, 142, 52, 144, 53, 146, 54, 148, 55, 150, 56, 152, 57, 154, 58, 156, 59, 158, 60, 160, 61, 162, 62, 164, 63, 166, 64, 168, 65, 170, 66, 172, 67, 174, 68, 176, 69, 178, 0, 180, 70, 182, 71, 184, 72, 186, 73, 188, 0, 190, 0, 192, 0, 194, 0, 196, 0, 198, 0, 200, 74, 202, 75, 204, 0, 206, 76, 208, 77, 210, 78, 212, 0, 214, 0, 216, 0, 218, 0, 220, 0, 222, 79, 224, 80, 226, 81, 228, 82, 230, 0, 232, 0, 234, 0, 236, 0, 238, 83, 240, 0, 242, 84, 244, 85, 246, 86, 248, 0, 250, 0, 252, 87, 254, 88, 256, 0, 258, 89, 260, 0, 262, 0, 264, 90, 266, 91, 268, 92, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 0, 282, 0, 284, 93, 286, 94, 288, 95, 290, 0, 292, 0, 294, 0, 296, 0, 298, 96, 300, 97, 302, 98, 304, 0, 306, 99, 308, 100, 310, 101, 312, 102, 314, 0, 316, 103, 318, 104, 320, 105, 322, 106, 324, 0, 326, 107, 328, 108, 330, 109, 332, 110, 334, 111, 336, 0, 338, 0, 340, 112, 342, 113, 344, 114, 346, 0, 348, 115, 350, 116, 352, 117, 354, 0, 356, 0, 358, 0, 360, 0, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1332, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 1, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 2, 74, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 
2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 184, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 4, 226, 1, 0, 0, 0, 4, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 9, 310, 1, 0, 0, 0, 9, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 10, 320, 1, 0, 0, 0, 10, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 11, 332, 1, 0, 0, 0, 11, 334, 1, 0, 0, 0, 12, 336, 1, 0, 0, 0, 12, 338, 1, 0, 0, 0, 12, 340, 1, 0, 0, 0, 12, 342, 1, 0, 0, 0, 12, 344, 1, 0, 0, 0, 13, 346, 1, 0, 0, 0, 13, 348, 1, 0, 0, 0, 13, 350, 1, 0, 0, 0, 13, 352, 1, 0, 0, 0, 13, 354, 1, 0, 0, 0, 13, 356, 1, 0, 0, 0, 13, 358, 1, 0, 0, 0, 13, 360, 1, 0, 0, 0, 14, 362, 1, 0, 0, 0, 16, 372, 1, 0, 0, 0, 18, 379, 1, 0, 0, 0, 20, 388, 1, 0, 0, 0, 22, 395, 1, 0, 0, 0, 24, 405, 1, 0, 0, 0, 26, 412, 1, 0, 0, 0, 28, 419, 1, 0, 0, 0, 30, 433, 1, 0, 0, 0, 32, 440, 1, 0, 0, 0, 34, 448, 1, 0, 0, 0, 36, 455, 1, 0, 0, 0, 38, 465, 1, 0, 0, 0, 40, 477, 1, 0, 0, 0, 42, 486, 1, 0, 0, 0, 44, 492, 1, 0, 0, 0, 46, 499, 1, 0, 0, 0, 48, 506, 1, 0, 0, 0, 50, 514, 1, 0, 0, 0, 52, 523, 1, 0, 0, 0, 54, 529, 1, 0, 0, 0, 56, 546, 1, 0, 0, 0, 58, 562, 1, 0, 0, 0, 60, 571, 1, 0, 0, 0, 62, 574, 1, 0, 0, 0, 64, 578, 1, 0, 0, 0, 66, 583, 1, 0, 0, 0, 68, 588, 1, 0, 0, 0, 70, 592, 1, 0, 0, 0, 72, 596, 1, 0, 0, 0, 74, 600, 1, 0, 0, 0, 76, 604, 1, 0, 0, 0, 78, 606, 1, 0, 0, 0, 80, 608, 1, 0, 0, 0, 82, 611, 1, 0, 0, 0, 84, 613, 1, 0, 0, 0, 86, 622, 1, 0, 0, 0, 88, 624, 1, 0, 0, 0, 90, 629, 1, 0, 0, 0, 92, 631, 1, 0, 0, 0, 94, 636, 1, 0, 0, 0, 96, 667, 1, 0, 0, 0, 98, 670, 1, 0, 0, 0, 100, 716, 1, 0, 0, 0, 102, 718, 1, 0, 0, 0, 104, 721, 1, 0, 0, 0, 106, 725, 1, 0, 0, 0, 108, 729, 1, 0, 0, 0, 110, 731, 1, 0, 0, 0, 112, 734, 1, 0, 0, 0, 114, 736, 1, 0, 0, 0, 116, 741, 1, 0, 0, 0, 118, 743, 1, 0, 0, 0, 120, 749, 1, 0, 0, 0, 122, 755, 1, 0, 0, 0, 124, 760, 1, 0, 0, 0, 126, 762, 1, 0, 0, 0, 128, 765, 1, 0, 0, 0, 130, 768, 1, 0, 0, 0, 132, 773, 1, 0, 0, 0, 134, 777, 1, 0, 0, 0, 136, 782, 1, 0, 0, 0, 138, 788, 1, 0, 0, 0, 140, 791, 1, 0, 0, 0, 142, 793, 1, 0, 0, 0, 144, 799, 1, 0, 0, 0, 146, 801, 1, 0, 0, 0, 
148, 806, 1, 0, 0, 0, 150, 809, 1, 0, 0, 0, 152, 812, 1, 0, 0, 0, 154, 815, 1, 0, 0, 0, 156, 817, 1, 0, 0, 0, 158, 820, 1, 0, 0, 0, 160, 822, 1, 0, 0, 0, 162, 825, 1, 0, 0, 0, 164, 827, 1, 0, 0, 0, 166, 829, 1, 0, 0, 0, 168, 831, 1, 0, 0, 0, 170, 833, 1, 0, 0, 0, 172, 835, 1, 0, 0, 0, 174, 840, 1, 0, 0, 0, 176, 861, 1, 0, 0, 0, 178, 863, 1, 0, 0, 0, 180, 871, 1, 0, 0, 0, 182, 873, 1, 0, 0, 0, 184, 877, 1, 0, 0, 0, 186, 881, 1, 0, 0, 0, 188, 885, 1, 0, 0, 0, 190, 890, 1, 0, 0, 0, 192, 894, 1, 0, 0, 0, 194, 898, 1, 0, 0, 0, 196, 902, 1, 0, 0, 0, 198, 906, 1, 0, 0, 0, 200, 910, 1, 0, 0, 0, 202, 918, 1, 0, 0, 0, 204, 927, 1, 0, 0, 0, 206, 931, 1, 0, 0, 0, 208, 935, 1, 0, 0, 0, 210, 939, 1, 0, 0, 0, 212, 943, 1, 0, 0, 0, 214, 948, 1, 0, 0, 0, 216, 952, 1, 0, 0, 0, 218, 960, 1, 0, 0, 0, 220, 981, 1, 0, 0, 0, 222, 985, 1, 0, 0, 0, 224, 989, 1, 0, 0, 0, 226, 993, 1, 0, 0, 0, 228, 997, 1, 0, 0, 0, 230, 1001, 1, 0, 0, 0, 232, 1006, 1, 0, 0, 0, 234, 1010, 1, 0, 0, 0, 236, 1014, 1, 0, 0, 0, 238, 1018, 1, 0, 0, 0, 240, 1021, 1, 0, 0, 0, 242, 1025, 1, 0, 0, 0, 244, 1029, 1, 0, 0, 0, 246, 1033, 1, 0, 0, 0, 248, 1037, 1, 0, 0, 0, 250, 1042, 1, 0, 0, 0, 252, 1047, 1, 0, 0, 0, 254, 1052, 1, 0, 0, 0, 256, 1059, 1, 0, 0, 0, 258, 1068, 1, 0, 0, 0, 260, 1075, 1, 0, 0, 0, 262, 1079, 1, 0, 0, 0, 264, 1083, 1, 0, 0, 0, 266, 1087, 1, 0, 0, 0, 268, 1091, 1, 0, 0, 0, 270, 1095, 1, 0, 0, 0, 272, 1101, 1, 0, 0, 0, 274, 1105, 1, 0, 0, 0, 276, 1109, 1, 0, 0, 0, 278, 1113, 1, 0, 0, 0, 280, 1117, 1, 0, 0, 0, 282, 1121, 1, 0, 0, 0, 284, 1125, 1, 0, 0, 0, 286, 1129, 1, 0, 0, 0, 288, 1133, 1, 0, 0, 0, 290, 1137, 1, 0, 0, 0, 292, 1142, 1, 0, 0, 0, 294, 1146, 1, 0, 0, 0, 296, 1150, 1, 0, 0, 0, 298, 1154, 1, 0, 0, 0, 300, 1158, 1, 0, 0, 0, 302, 1162, 1, 0, 0, 0, 304, 1166, 1, 0, 0, 0, 306, 1171, 1, 0, 0, 0, 308, 1176, 1, 0, 0, 0, 310, 1180, 1, 0, 0, 0, 312, 1184, 1, 0, 0, 0, 314, 1188, 1, 0, 0, 0, 316, 1193, 1, 0, 0, 0, 318, 1203, 1, 0, 0, 0, 320, 1207, 1, 0, 0, 0, 322, 1211, 1, 0, 0, 0, 324, 1215, 1, 0, 0, 0, 326, 1220, 1, 0, 0, 0, 328, 1227, 1, 0, 0, 0, 330, 1231, 1, 0, 0, 0, 332, 1235, 1, 0, 0, 0, 334, 1239, 1, 0, 0, 0, 336, 1243, 1, 0, 0, 0, 338, 1248, 1, 0, 0, 0, 340, 1254, 1, 0, 0, 0, 342, 1258, 1, 0, 0, 0, 344, 1262, 1, 0, 0, 0, 346, 1266, 1, 0, 0, 0, 348, 1272, 1, 0, 0, 0, 350, 1276, 1, 0, 0, 0, 352, 1280, 1, 0, 0, 0, 354, 1284, 1, 0, 0, 0, 356, 1290, 1, 0, 0, 0, 358, 1296, 1, 0, 0, 0, 360, 1302, 1, 0, 0, 0, 362, 363, 5, 100, 0, 0, 363, 364, 5, 105, 0, 0, 364, 365, 5, 115, 0, 0, 365, 366, 5, 115, 0, 0, 366, 367, 5, 101, 0, 0, 367, 368, 5, 99, 0, 0, 368, 369, 5, 116, 0, 0, 369, 370, 1, 0, 0, 0, 370, 371, 6, 0, 0, 0, 371, 15, 1, 0, 0, 0, 372, 373, 5, 100, 0, 0, 373, 374, 5, 114, 0, 0, 374, 375, 5, 111, 0, 0, 375, 376, 5, 112, 0, 0, 376, 377, 1, 0, 0, 0, 377, 378, 6, 1, 1, 0, 378, 17, 1, 0, 0, 0, 379, 380, 5, 101, 0, 0, 380, 381, 5, 110, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 105, 0, 0, 383, 384, 5, 99, 0, 0, 384, 385, 5, 104, 0, 0, 385, 386, 1, 0, 0, 0, 386, 387, 6, 2, 2, 0, 387, 19, 1, 0, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 118, 0, 0, 390, 391, 5, 97, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 1, 0, 0, 0, 393, 394, 6, 3, 0, 0, 394, 21, 1, 0, 0, 0, 395, 396, 5, 101, 0, 0, 396, 397, 5, 120, 0, 0, 397, 398, 5, 112, 0, 0, 398, 399, 5, 108, 0, 0, 399, 400, 5, 97, 0, 0, 400, 401, 5, 105, 0, 0, 401, 402, 5, 110, 0, 0, 402, 403, 1, 0, 0, 0, 403, 404, 6, 4, 3, 0, 404, 23, 1, 0, 0, 0, 405, 406, 5, 102, 0, 0, 406, 407, 5, 114, 0, 0, 407, 408, 5, 111, 0, 0, 408, 409, 5, 109, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 5, 4, 0, 
411, 25, 1, 0, 0, 0, 412, 413, 5, 103, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 111, 0, 0, 415, 416, 5, 107, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 6, 0, 0, 418, 27, 1, 0, 0, 0, 419, 420, 5, 105, 0, 0, 420, 421, 5, 110, 0, 0, 421, 422, 5, 108, 0, 0, 422, 423, 5, 105, 0, 0, 423, 424, 5, 110, 0, 0, 424, 425, 5, 101, 0, 0, 425, 426, 5, 115, 0, 0, 426, 427, 5, 116, 0, 0, 427, 428, 5, 97, 0, 0, 428, 429, 5, 116, 0, 0, 429, 430, 5, 115, 0, 0, 430, 431, 1, 0, 0, 0, 431, 432, 6, 7, 0, 0, 432, 29, 1, 0, 0, 0, 433, 434, 5, 107, 0, 0, 434, 435, 5, 101, 0, 0, 435, 436, 5, 101, 0, 0, 436, 437, 5, 112, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 8, 1, 0, 439, 31, 1, 0, 0, 0, 440, 441, 5, 108, 0, 0, 441, 442, 5, 105, 0, 0, 442, 443, 5, 109, 0, 0, 443, 444, 5, 105, 0, 0, 444, 445, 5, 116, 0, 0, 445, 446, 1, 0, 0, 0, 446, 447, 6, 9, 0, 0, 447, 33, 1, 0, 0, 0, 448, 449, 5, 109, 0, 0, 449, 450, 5, 101, 0, 0, 450, 451, 5, 116, 0, 0, 451, 452, 5, 97, 0, 0, 452, 453, 1, 0, 0, 0, 453, 454, 6, 10, 5, 0, 454, 35, 1, 0, 0, 0, 455, 456, 5, 109, 0, 0, 456, 457, 5, 101, 0, 0, 457, 458, 5, 116, 0, 0, 458, 459, 5, 114, 0, 0, 459, 460, 5, 105, 0, 0, 460, 461, 5, 99, 0, 0, 461, 462, 5, 115, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 6, 11, 6, 0, 464, 37, 1, 0, 0, 0, 465, 466, 5, 109, 0, 0, 466, 467, 5, 118, 0, 0, 467, 468, 5, 95, 0, 0, 468, 469, 5, 101, 0, 0, 469, 470, 5, 120, 0, 0, 470, 471, 5, 112, 0, 0, 471, 472, 5, 97, 0, 0, 472, 473, 5, 110, 0, 0, 473, 474, 5, 100, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 12, 7, 0, 476, 39, 1, 0, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 5, 110, 0, 0, 480, 481, 5, 97, 0, 0, 481, 482, 5, 109, 0, 0, 482, 483, 5, 101, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 6, 13, 8, 0, 485, 41, 1, 0, 0, 0, 486, 487, 5, 114, 0, 0, 487, 488, 5, 111, 0, 0, 488, 489, 5, 119, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 14, 0, 0, 491, 43, 1, 0, 0, 0, 492, 493, 5, 115, 0, 0, 493, 494, 5, 104, 0, 0, 494, 495, 5, 111, 0, 0, 495, 496, 5, 119, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 15, 9, 0, 498, 45, 1, 0, 0, 0, 499, 500, 5, 115, 0, 0, 500, 501, 5, 111, 0, 0, 501, 502, 5, 114, 0, 0, 502, 503, 5, 116, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 16, 0, 0, 505, 47, 1, 0, 0, 0, 506, 507, 5, 115, 0, 0, 507, 508, 5, 116, 0, 0, 508, 509, 5, 97, 0, 0, 509, 510, 5, 116, 0, 0, 510, 511, 5, 115, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 17, 0, 0, 513, 49, 1, 0, 0, 0, 514, 515, 5, 119, 0, 0, 515, 516, 5, 104, 0, 0, 516, 517, 5, 101, 0, 0, 517, 518, 5, 114, 0, 0, 518, 519, 5, 101, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 18, 0, 0, 521, 51, 1, 0, 0, 0, 522, 524, 8, 0, 0, 0, 523, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 523, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 6, 19, 0, 0, 528, 53, 1, 0, 0, 0, 529, 530, 5, 47, 0, 0, 530, 531, 5, 47, 0, 0, 531, 535, 1, 0, 0, 0, 532, 534, 8, 1, 0, 0, 533, 532, 1, 0, 0, 0, 534, 537, 1, 0, 0, 0, 535, 533, 1, 0, 0, 0, 535, 536, 1, 0, 0, 0, 536, 539, 1, 0, 0, 0, 537, 535, 1, 0, 0, 0, 538, 540, 5, 13, 0, 0, 539, 538, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 542, 1, 0, 0, 0, 541, 543, 5, 10, 0, 0, 542, 541, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 20, 10, 0, 545, 55, 1, 0, 0, 0, 546, 547, 5, 47, 0, 0, 547, 548, 5, 42, 0, 0, 548, 553, 1, 0, 0, 0, 549, 552, 3, 56, 21, 0, 550, 552, 9, 0, 0, 0, 551, 549, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 554, 556, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 557, 5, 42, 0, 0, 557, 558, 5, 47, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 6, 21, 10, 0, 
560, 57, 1, 0, 0, 0, 561, 563, 7, 2, 0, 0, 562, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 567, 6, 22, 10, 0, 567, 59, 1, 0, 0, 0, 568, 572, 8, 3, 0, 0, 569, 570, 5, 47, 0, 0, 570, 572, 8, 4, 0, 0, 571, 568, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 61, 1, 0, 0, 0, 573, 575, 3, 60, 23, 0, 574, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 63, 1, 0, 0, 0, 578, 579, 3, 172, 79, 0, 579, 580, 1, 0, 0, 0, 580, 581, 6, 25, 11, 0, 581, 582, 6, 25, 12, 0, 582, 65, 1, 0, 0, 0, 583, 584, 3, 74, 30, 0, 584, 585, 1, 0, 0, 0, 585, 586, 6, 26, 13, 0, 586, 587, 6, 26, 14, 0, 587, 67, 1, 0, 0, 0, 588, 589, 3, 58, 22, 0, 589, 590, 1, 0, 0, 0, 590, 591, 6, 27, 10, 0, 591, 69, 1, 0, 0, 0, 592, 593, 3, 54, 20, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 28, 10, 0, 595, 71, 1, 0, 0, 0, 596, 597, 3, 56, 21, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 29, 10, 0, 599, 73, 1, 0, 0, 0, 600, 601, 5, 124, 0, 0, 601, 602, 1, 0, 0, 0, 602, 603, 6, 30, 14, 0, 603, 75, 1, 0, 0, 0, 604, 605, 7, 5, 0, 0, 605, 77, 1, 0, 0, 0, 606, 607, 7, 6, 0, 0, 607, 79, 1, 0, 0, 0, 608, 609, 5, 92, 0, 0, 609, 610, 7, 7, 0, 0, 610, 81, 1, 0, 0, 0, 611, 612, 8, 8, 0, 0, 612, 83, 1, 0, 0, 0, 613, 615, 7, 9, 0, 0, 614, 616, 7, 10, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 619, 3, 76, 31, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 85, 1, 0, 0, 0, 622, 623, 5, 64, 0, 0, 623, 87, 1, 0, 0, 0, 624, 625, 5, 96, 0, 0, 625, 89, 1, 0, 0, 0, 626, 630, 8, 11, 0, 0, 627, 628, 5, 96, 0, 0, 628, 630, 5, 96, 0, 0, 629, 626, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 91, 1, 0, 0, 0, 631, 632, 5, 95, 0, 0, 632, 93, 1, 0, 0, 0, 633, 637, 3, 78, 32, 0, 634, 637, 3, 76, 31, 0, 635, 637, 3, 92, 39, 0, 636, 633, 1, 0, 0, 0, 636, 634, 1, 0, 0, 0, 636, 635, 1, 0, 0, 0, 637, 95, 1, 0, 0, 0, 638, 643, 5, 34, 0, 0, 639, 642, 3, 80, 33, 0, 640, 642, 3, 82, 34, 0, 641, 639, 1, 0, 0, 0, 641, 640, 1, 0, 0, 0, 642, 645, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 646, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 646, 668, 5, 34, 0, 0, 647, 648, 5, 34, 0, 0, 648, 649, 5, 34, 0, 0, 649, 650, 5, 34, 0, 0, 650, 654, 1, 0, 0, 0, 651, 653, 8, 1, 0, 0, 652, 651, 1, 0, 0, 0, 653, 656, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 654, 652, 1, 0, 0, 0, 655, 657, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 657, 658, 5, 34, 0, 0, 658, 659, 5, 34, 0, 0, 659, 660, 5, 34, 0, 0, 660, 662, 1, 0, 0, 0, 661, 663, 5, 34, 0, 0, 662, 661, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 665, 1, 0, 0, 0, 664, 666, 5, 34, 0, 0, 665, 664, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 638, 1, 0, 0, 0, 667, 647, 1, 0, 0, 0, 668, 97, 1, 0, 0, 0, 669, 671, 3, 76, 31, 0, 670, 669, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 670, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 99, 1, 0, 0, 0, 674, 676, 3, 76, 31, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 679, 1, 0, 0, 0, 679, 683, 3, 116, 51, 0, 680, 682, 3, 76, 31, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 717, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 688, 3, 116, 51, 0, 687, 689, 3, 76, 31, 0, 688, 687, 1, 0, 0, 0, 689, 690, 1, 0, 0, 0, 690, 688, 1, 0, 0, 0, 690, 691, 1, 0, 0, 0, 691, 717, 1, 0, 0, 0, 692, 694, 3, 76, 31, 0, 693, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 704, 1, 0, 0, 0, 697, 701, 3, 116, 51, 0, 698, 700, 3, 76, 31, 0, 699, 698, 1, 0, 
0, 0, 700, 703, 1, 0, 0, 0, 701, 699, 1, 0, 0, 0, 701, 702, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 704, 697, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 3, 84, 35, 0, 707, 717, 1, 0, 0, 0, 708, 710, 3, 116, 51, 0, 709, 711, 3, 76, 31, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 715, 3, 84, 35, 0, 715, 717, 1, 0, 0, 0, 716, 675, 1, 0, 0, 0, 716, 686, 1, 0, 0, 0, 716, 693, 1, 0, 0, 0, 716, 708, 1, 0, 0, 0, 717, 101, 1, 0, 0, 0, 718, 719, 5, 98, 0, 0, 719, 720, 5, 121, 0, 0, 720, 103, 1, 0, 0, 0, 721, 722, 5, 97, 0, 0, 722, 723, 5, 110, 0, 0, 723, 724, 5, 100, 0, 0, 724, 105, 1, 0, 0, 0, 725, 726, 5, 97, 0, 0, 726, 727, 5, 115, 0, 0, 727, 728, 5, 99, 0, 0, 728, 107, 1, 0, 0, 0, 729, 730, 5, 61, 0, 0, 730, 109, 1, 0, 0, 0, 731, 732, 5, 58, 0, 0, 732, 733, 5, 58, 0, 0, 733, 111, 1, 0, 0, 0, 734, 735, 5, 44, 0, 0, 735, 113, 1, 0, 0, 0, 736, 737, 5, 100, 0, 0, 737, 738, 5, 101, 0, 0, 738, 739, 5, 115, 0, 0, 739, 740, 5, 99, 0, 0, 740, 115, 1, 0, 0, 0, 741, 742, 5, 46, 0, 0, 742, 117, 1, 0, 0, 0, 743, 744, 5, 102, 0, 0, 744, 745, 5, 97, 0, 0, 745, 746, 5, 108, 0, 0, 746, 747, 5, 115, 0, 0, 747, 748, 5, 101, 0, 0, 748, 119, 1, 0, 0, 0, 749, 750, 5, 102, 0, 0, 750, 751, 5, 105, 0, 0, 751, 752, 5, 114, 0, 0, 752, 753, 5, 115, 0, 0, 753, 754, 5, 116, 0, 0, 754, 121, 1, 0, 0, 0, 755, 756, 5, 108, 0, 0, 756, 757, 5, 97, 0, 0, 757, 758, 5, 115, 0, 0, 758, 759, 5, 116, 0, 0, 759, 123, 1, 0, 0, 0, 760, 761, 5, 40, 0, 0, 761, 125, 1, 0, 0, 0, 762, 763, 5, 105, 0, 0, 763, 764, 5, 110, 0, 0, 764, 127, 1, 0, 0, 0, 765, 766, 5, 105, 0, 0, 766, 767, 5, 115, 0, 0, 767, 129, 1, 0, 0, 0, 768, 769, 5, 108, 0, 0, 769, 770, 5, 105, 0, 0, 770, 771, 5, 107, 0, 0, 771, 772, 5, 101, 0, 0, 772, 131, 1, 0, 0, 0, 773, 774, 5, 110, 0, 0, 774, 775, 5, 111, 0, 0, 775, 776, 5, 116, 0, 0, 776, 133, 1, 0, 0, 0, 777, 778, 5, 110, 0, 0, 778, 779, 5, 117, 0, 0, 779, 780, 5, 108, 0, 0, 780, 781, 5, 108, 0, 0, 781, 135, 1, 0, 0, 0, 782, 783, 5, 110, 0, 0, 783, 784, 5, 117, 0, 0, 784, 785, 5, 108, 0, 0, 785, 786, 5, 108, 0, 0, 786, 787, 5, 115, 0, 0, 787, 137, 1, 0, 0, 0, 788, 789, 5, 111, 0, 0, 789, 790, 5, 114, 0, 0, 790, 139, 1, 0, 0, 0, 791, 792, 5, 63, 0, 0, 792, 141, 1, 0, 0, 0, 793, 794, 5, 114, 0, 0, 794, 795, 5, 108, 0, 0, 795, 796, 5, 105, 0, 0, 796, 797, 5, 107, 0, 0, 797, 798, 5, 101, 0, 0, 798, 143, 1, 0, 0, 0, 799, 800, 5, 41, 0, 0, 800, 145, 1, 0, 0, 0, 801, 802, 5, 116, 0, 0, 802, 803, 5, 114, 0, 0, 803, 804, 5, 117, 0, 0, 804, 805, 5, 101, 0, 0, 805, 147, 1, 0, 0, 0, 806, 807, 5, 61, 0, 0, 807, 808, 5, 61, 0, 0, 808, 149, 1, 0, 0, 0, 809, 810, 5, 61, 0, 0, 810, 811, 5, 126, 0, 0, 811, 151, 1, 0, 0, 0, 812, 813, 5, 33, 0, 0, 813, 814, 5, 61, 0, 0, 814, 153, 1, 0, 0, 0, 815, 816, 5, 60, 0, 0, 816, 155, 1, 0, 0, 0, 817, 818, 5, 60, 0, 0, 818, 819, 5, 61, 0, 0, 819, 157, 1, 0, 0, 0, 820, 821, 5, 62, 0, 0, 821, 159, 1, 0, 0, 0, 822, 823, 5, 62, 0, 0, 823, 824, 5, 61, 0, 0, 824, 161, 1, 0, 0, 0, 825, 826, 5, 43, 0, 0, 826, 163, 1, 0, 0, 0, 827, 828, 5, 45, 0, 0, 828, 165, 1, 0, 0, 0, 829, 830, 5, 42, 0, 0, 830, 167, 1, 0, 0, 0, 831, 832, 5, 47, 0, 0, 832, 169, 1, 0, 0, 0, 833, 834, 5, 37, 0, 0, 834, 171, 1, 0, 0, 0, 835, 836, 5, 91, 0, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 79, 0, 0, 838, 839, 6, 79, 0, 0, 839, 173, 1, 0, 0, 0, 840, 841, 5, 93, 0, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 80, 14, 0, 843, 844, 6, 80, 14, 0, 844, 175, 1, 0, 0, 0, 845, 849, 3, 78, 32, 0, 846, 848, 3, 94, 40, 0, 847, 846, 1, 0, 0, 0, 848, 
851, 1, 0, 0, 0, 849, 847, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 862, 1, 0, 0, 0, 851, 849, 1, 0, 0, 0, 852, 855, 3, 92, 39, 0, 853, 855, 3, 86, 36, 0, 854, 852, 1, 0, 0, 0, 854, 853, 1, 0, 0, 0, 855, 857, 1, 0, 0, 0, 856, 858, 3, 94, 40, 0, 857, 856, 1, 0, 0, 0, 858, 859, 1, 0, 0, 0, 859, 857, 1, 0, 0, 0, 859, 860, 1, 0, 0, 0, 860, 862, 1, 0, 0, 0, 861, 845, 1, 0, 0, 0, 861, 854, 1, 0, 0, 0, 862, 177, 1, 0, 0, 0, 863, 865, 3, 88, 37, 0, 864, 866, 3, 90, 38, 0, 865, 864, 1, 0, 0, 0, 866, 867, 1, 0, 0, 0, 867, 865, 1, 0, 0, 0, 867, 868, 1, 0, 0, 0, 868, 869, 1, 0, 0, 0, 869, 870, 3, 88, 37, 0, 870, 179, 1, 0, 0, 0, 871, 872, 3, 178, 82, 0, 872, 181, 1, 0, 0, 0, 873, 874, 3, 54, 20, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 84, 10, 0, 876, 183, 1, 0, 0, 0, 877, 878, 3, 56, 21, 0, 878, 879, 1, 0, 0, 0, 879, 880, 6, 85, 10, 0, 880, 185, 1, 0, 0, 0, 881, 882, 3, 58, 22, 0, 882, 883, 1, 0, 0, 0, 883, 884, 6, 86, 10, 0, 884, 187, 1, 0, 0, 0, 885, 886, 3, 74, 30, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 87, 13, 0, 888, 889, 6, 87, 14, 0, 889, 189, 1, 0, 0, 0, 890, 891, 3, 172, 79, 0, 891, 892, 1, 0, 0, 0, 892, 893, 6, 88, 11, 0, 893, 191, 1, 0, 0, 0, 894, 895, 3, 174, 80, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 89, 15, 0, 897, 193, 1, 0, 0, 0, 898, 899, 3, 112, 49, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 90, 16, 0, 901, 195, 1, 0, 0, 0, 902, 903, 3, 108, 47, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 91, 17, 0, 905, 197, 1, 0, 0, 0, 906, 907, 3, 96, 41, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 92, 18, 0, 909, 199, 1, 0, 0, 0, 910, 911, 5, 111, 0, 0, 911, 912, 5, 112, 0, 0, 912, 913, 5, 116, 0, 0, 913, 914, 5, 105, 0, 0, 914, 915, 5, 111, 0, 0, 915, 916, 5, 110, 0, 0, 916, 917, 5, 115, 0, 0, 917, 201, 1, 0, 0, 0, 918, 919, 5, 109, 0, 0, 919, 920, 5, 101, 0, 0, 920, 921, 5, 116, 0, 0, 921, 922, 5, 97, 0, 0, 922, 923, 5, 100, 0, 0, 923, 924, 5, 97, 0, 0, 924, 925, 5, 116, 0, 0, 925, 926, 5, 97, 0, 0, 926, 203, 1, 0, 0, 0, 927, 928, 3, 62, 24, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 95, 19, 0, 930, 205, 1, 0, 0, 0, 931, 932, 3, 54, 20, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 96, 10, 0, 934, 207, 1, 0, 0, 0, 935, 936, 3, 56, 21, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 97, 10, 0, 938, 209, 1, 0, 0, 0, 939, 940, 3, 58, 22, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 98, 10, 0, 942, 211, 1, 0, 0, 0, 943, 944, 3, 74, 30, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 99, 13, 0, 946, 947, 6, 99, 14, 0, 947, 213, 1, 0, 0, 0, 948, 949, 3, 116, 51, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 100, 20, 0, 951, 215, 1, 0, 0, 0, 952, 953, 3, 112, 49, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 101, 16, 0, 955, 217, 1, 0, 0, 0, 956, 961, 3, 78, 32, 0, 957, 961, 3, 76, 31, 0, 958, 961, 3, 92, 39, 0, 959, 961, 3, 166, 76, 0, 960, 956, 1, 0, 0, 0, 960, 957, 1, 0, 0, 0, 960, 958, 1, 0, 0, 0, 960, 959, 1, 0, 0, 0, 961, 219, 1, 0, 0, 0, 962, 965, 3, 78, 32, 0, 963, 965, 3, 166, 76, 0, 964, 962, 1, 0, 0, 0, 964, 963, 1, 0, 0, 0, 965, 969, 1, 0, 0, 0, 966, 968, 3, 218, 102, 0, 967, 966, 1, 0, 0, 0, 968, 971, 1, 0, 0, 0, 969, 967, 1, 0, 0, 0, 969, 970, 1, 0, 0, 0, 970, 982, 1, 0, 0, 0, 971, 969, 1, 0, 0, 0, 972, 975, 3, 92, 39, 0, 973, 975, 3, 86, 36, 0, 974, 972, 1, 0, 0, 0, 974, 973, 1, 0, 0, 0, 975, 977, 1, 0, 0, 0, 976, 978, 3, 218, 102, 0, 977, 976, 1, 0, 0, 0, 978, 979, 1, 0, 0, 0, 979, 977, 1, 0, 0, 0, 979, 980, 1, 0, 0, 0, 980, 982, 1, 0, 0, 0, 981, 964, 1, 0, 0, 0, 981, 974, 1, 0, 0, 0, 982, 221, 1, 0, 0, 0, 983, 986, 3, 220, 103, 0, 984, 986, 3, 178, 82, 0, 985, 983, 1, 0, 0, 0, 985, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 985, 1, 0, 0, 0, 987, 988, 1, 0, 0, 
0, 988, 223, 1, 0, 0, 0, 989, 990, 3, 54, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 105, 10, 0, 992, 225, 1, 0, 0, 0, 993, 994, 3, 56, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 106, 10, 0, 996, 227, 1, 0, 0, 0, 997, 998, 3, 58, 22, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 107, 10, 0, 1000, 229, 1, 0, 0, 0, 1001, 1002, 3, 74, 30, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 108, 13, 0, 1004, 1005, 6, 108, 14, 0, 1005, 231, 1, 0, 0, 0, 1006, 1007, 3, 108, 47, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 109, 17, 0, 1009, 233, 1, 0, 0, 0, 1010, 1011, 3, 112, 49, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 110, 16, 0, 1013, 235, 1, 0, 0, 0, 1014, 1015, 3, 116, 51, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 111, 20, 0, 1017, 237, 1, 0, 0, 0, 1018, 1019, 5, 97, 0, 0, 1019, 1020, 5, 115, 0, 0, 1020, 239, 1, 0, 0, 0, 1021, 1022, 3, 222, 104, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 113, 21, 0, 1024, 241, 1, 0, 0, 0, 1025, 1026, 3, 54, 20, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 114, 10, 0, 1028, 243, 1, 0, 0, 0, 1029, 1030, 3, 56, 21, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 115, 10, 0, 1032, 245, 1, 0, 0, 0, 1033, 1034, 3, 58, 22, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 116, 10, 0, 1036, 247, 1, 0, 0, 0, 1037, 1038, 3, 74, 30, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 117, 13, 0, 1040, 1041, 6, 117, 14, 0, 1041, 249, 1, 0, 0, 0, 1042, 1043, 3, 172, 79, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 118, 11, 0, 1045, 1046, 6, 118, 22, 0, 1046, 251, 1, 0, 0, 0, 1047, 1048, 5, 111, 0, 0, 1048, 1049, 5, 110, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 119, 23, 0, 1051, 253, 1, 0, 0, 0, 1052, 1053, 5, 119, 0, 0, 1053, 1054, 5, 105, 0, 0, 1054, 1055, 5, 116, 0, 0, 1055, 1056, 5, 104, 0, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 120, 23, 0, 1058, 255, 1, 0, 0, 0, 1059, 1060, 8, 12, 0, 0, 1060, 257, 1, 0, 0, 0, 1061, 1063, 3, 256, 121, 0, 1062, 1061, 1, 0, 0, 0, 1063, 1064, 1, 0, 0, 0, 1064, 1062, 1, 0, 0, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 3, 326, 156, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1062, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1071, 1, 0, 0, 0, 1070, 1072, 3, 256, 121, 0, 1071, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 259, 1, 0, 0, 0, 1075, 1076, 3, 180, 83, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 123, 24, 0, 1078, 261, 1, 0, 0, 0, 1079, 1080, 3, 258, 122, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 124, 25, 0, 1082, 263, 1, 0, 0, 0, 1083, 1084, 3, 54, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 125, 10, 0, 1086, 265, 1, 0, 0, 0, 1087, 1088, 3, 56, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 126, 10, 0, 1090, 267, 1, 0, 0, 0, 1091, 1092, 3, 58, 22, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 127, 10, 0, 1094, 269, 1, 0, 0, 0, 1095, 1096, 3, 74, 30, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 128, 13, 0, 1098, 1099, 6, 128, 14, 0, 1099, 1100, 6, 128, 14, 0, 1100, 271, 1, 0, 0, 0, 1101, 1102, 3, 108, 47, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 129, 17, 0, 1104, 273, 1, 0, 0, 0, 1105, 1106, 3, 112, 49, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 130, 16, 0, 1108, 275, 1, 0, 0, 0, 1109, 1110, 3, 116, 51, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 131, 20, 0, 1112, 277, 1, 0, 0, 0, 1113, 1114, 3, 254, 120, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 132, 26, 0, 1116, 279, 1, 0, 0, 0, 1117, 1118, 3, 222, 104, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 133, 21, 0, 1120, 281, 1, 0, 0, 0, 1121, 1122, 3, 180, 83, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 134, 24, 0, 1124, 283, 1, 0, 0, 0, 1125, 1126, 3, 54, 20, 0, 1126, 1127, 1, 0, 0, 0, 
1127, 1128, 6, 135, 10, 0, 1128, 285, 1, 0, 0, 0, 1129, 1130, 3, 56, 21, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 136, 10, 0, 1132, 287, 1, 0, 0, 0, 1133, 1134, 3, 58, 22, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1136, 6, 137, 10, 0, 1136, 289, 1, 0, 0, 0, 1137, 1138, 3, 74, 30, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 138, 13, 0, 1140, 1141, 6, 138, 14, 0, 1141, 291, 1, 0, 0, 0, 1142, 1143, 3, 116, 51, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 139, 20, 0, 1145, 293, 1, 0, 0, 0, 1146, 1147, 3, 180, 83, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 140, 24, 0, 1149, 295, 1, 0, 0, 0, 1150, 1151, 3, 176, 81, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 141, 27, 0, 1153, 297, 1, 0, 0, 0, 1154, 1155, 3, 54, 20, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 142, 10, 0, 1157, 299, 1, 0, 0, 0, 1158, 1159, 3, 56, 21, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 143, 10, 0, 1161, 301, 1, 0, 0, 0, 1162, 1163, 3, 58, 22, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 144, 10, 0, 1165, 303, 1, 0, 0, 0, 1166, 1167, 3, 74, 30, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 145, 13, 0, 1169, 1170, 6, 145, 14, 0, 1170, 305, 1, 0, 0, 0, 1171, 1172, 5, 105, 0, 0, 1172, 1173, 5, 110, 0, 0, 1173, 1174, 5, 102, 0, 0, 1174, 1175, 5, 111, 0, 0, 1175, 307, 1, 0, 0, 0, 1176, 1177, 3, 54, 20, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 147, 10, 0, 1179, 309, 1, 0, 0, 0, 1180, 1181, 3, 56, 21, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 148, 10, 0, 1183, 311, 1, 0, 0, 0, 1184, 1185, 3, 58, 22, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 149, 10, 0, 1187, 313, 1, 0, 0, 0, 1188, 1189, 3, 74, 30, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 150, 13, 0, 1191, 1192, 6, 150, 14, 0, 1192, 315, 1, 0, 0, 0, 1193, 1194, 5, 102, 0, 0, 1194, 1195, 5, 117, 0, 0, 1195, 1196, 5, 110, 0, 0, 1196, 1197, 5, 99, 0, 0, 1197, 1198, 5, 116, 0, 0, 1198, 1199, 5, 105, 0, 0, 1199, 1200, 5, 111, 0, 0, 1200, 1201, 5, 110, 0, 0, 1201, 1202, 5, 115, 0, 0, 1202, 317, 1, 0, 0, 0, 1203, 1204, 3, 54, 20, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 152, 10, 0, 1206, 319, 1, 0, 0, 0, 1207, 1208, 3, 56, 21, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 153, 10, 0, 1210, 321, 1, 0, 0, 0, 1211, 1212, 3, 58, 22, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 154, 10, 0, 1214, 323, 1, 0, 0, 0, 1215, 1216, 3, 174, 80, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 155, 15, 0, 1218, 1219, 6, 155, 14, 0, 1219, 325, 1, 0, 0, 0, 1220, 1221, 5, 58, 0, 0, 1221, 327, 1, 0, 0, 0, 1222, 1228, 3, 86, 36, 0, 1223, 1228, 3, 76, 31, 0, 1224, 1228, 3, 116, 51, 0, 1225, 1228, 3, 78, 32, 0, 1226, 1228, 3, 92, 39, 0, 1227, 1222, 1, 0, 0, 0, 1227, 1223, 1, 0, 0, 0, 1227, 1224, 1, 0, 0, 0, 1227, 1225, 1, 0, 0, 0, 1227, 1226, 1, 0, 0, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1227, 1, 0, 0, 0, 1229, 1230, 1, 0, 0, 0, 1230, 329, 1, 0, 0, 0, 1231, 1232, 3, 54, 20, 0, 1232, 1233, 1, 0, 0, 0, 1233, 1234, 6, 158, 10, 0, 1234, 331, 1, 0, 0, 0, 1235, 1236, 3, 56, 21, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 159, 10, 0, 1238, 333, 1, 0, 0, 0, 1239, 1240, 3, 58, 22, 0, 1240, 1241, 1, 0, 0, 0, 1241, 1242, 6, 160, 10, 0, 1242, 335, 1, 0, 0, 0, 1243, 1244, 3, 74, 30, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 161, 13, 0, 1246, 1247, 6, 161, 14, 0, 1247, 337, 1, 0, 0, 0, 1248, 1249, 3, 62, 24, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 162, 19, 0, 1251, 1252, 6, 162, 14, 0, 1252, 1253, 6, 162, 28, 0, 1253, 339, 1, 0, 0, 0, 1254, 1255, 3, 54, 20, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 163, 10, 0, 1257, 341, 1, 0, 0, 0, 1258, 1259, 3, 56, 21, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 164, 10, 0, 1261, 343, 1, 0, 0, 0, 1262, 1263, 3, 58, 22, 0, 1263, 
1264, 1, 0, 0, 0, 1264, 1265, 6, 165, 10, 0, 1265, 345, 1, 0, 0, 0, 1266, 1267, 3, 112, 49, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 166, 16, 0, 1269, 1270, 6, 166, 14, 0, 1270, 1271, 6, 166, 6, 0, 1271, 347, 1, 0, 0, 0, 1272, 1273, 3, 54, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 167, 10, 0, 1275, 349, 1, 0, 0, 0, 1276, 1277, 3, 56, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 168, 10, 0, 1279, 351, 1, 0, 0, 0, 1280, 1281, 3, 58, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 169, 10, 0, 1283, 353, 1, 0, 0, 0, 1284, 1285, 3, 180, 83, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 170, 14, 0, 1287, 1288, 6, 170, 0, 0, 1288, 1289, 6, 170, 24, 0, 1289, 355, 1, 0, 0, 0, 1290, 1291, 3, 176, 81, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 171, 14, 0, 1293, 1294, 6, 171, 0, 0, 1294, 1295, 6, 171, 27, 0, 1295, 357, 1, 0, 0, 0, 1296, 1297, 3, 102, 44, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 172, 14, 0, 1299, 1300, 6, 172, 0, 0, 1300, 1301, 6, 172, 29, 0, 1301, 359, 1, 0, 0, 0, 1302, 1303, 3, 74, 30, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 173, 13, 0, 1305, 1306, 6, 173, 14, 0, 1306, 361, 1, 0, 0, 0, 60, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 525, 535, 539, 542, 551, 553, 564, 571, 576, 615, 620, 629, 636, 641, 643, 654, 662, 665, 667, 672, 677, 683, 690, 695, 701, 704, 712, 716, 849, 854, 859, 861, 867, 960, 964, 969, 974, 979, 981, 985, 987, 1064, 1068, 1073, 1227, 1229, 30, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 12, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 67, 0, 5, 0, 0, 7, 28, 0, 4, 0, 0, 7, 68, 0, 7, 37, 0, 7, 35, 0, 7, 29, 0, 7, 24, 0, 7, 39, 0, 7, 79, 0, 5, 11, 0, 5, 7, 0, 7, 70, 0, 7, 89, 0, 7, 88, 0, 7, 69, 0, 5, 13, 0, 7, 32, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index ac3354d0aa907..d7a73eeb844d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -18,30 +18,32 @@ public class EsqlBaseLexer extends Lexer { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, - STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, - FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, - OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, - METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, - FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, - PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, - RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, - ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, - 
ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, - MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, - SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, - META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, - SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; + KEEP=9, LIMIT=10, META=11, METRICS=12, MV_EXPAND=13, RENAME=14, ROW=15, + SHOW=16, SORT=17, STATS=18, WHERE=19, UNKNOWN_CMD=20, LINE_COMMENT=21, + MULTILINE_COMMENT=22, WS=23, INDEX_UNQUOTED_IDENTIFIER=24, EXPLAIN_WS=25, + EXPLAIN_LINE_COMMENT=26, EXPLAIN_MULTILINE_COMMENT=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COMMA=37, DESC=38, DOT=39, FALSE=40, FIRST=41, LAST=42, LP=43, + IN=44, IS=45, LIKE=46, NOT=47, NULL=48, NULLS=49, OR=50, PARAM=51, RLIKE=52, + RP=53, TRUE=54, EQ=55, CIEQ=56, NEQ=57, LT=58, LTE=59, GT=60, GTE=61, + PLUS=62, MINUS=63, ASTERISK=64, SLASH=65, PERCENT=66, OPENING_BRACKET=67, + CLOSING_BRACKET=68, UNQUOTED_IDENTIFIER=69, QUOTED_IDENTIFIER=70, EXPR_LINE_COMMENT=71, + EXPR_MULTILINE_COMMENT=72, EXPR_WS=73, OPTIONS=74, METADATA=75, FROM_LINE_COMMENT=76, + FROM_MULTILINE_COMMENT=77, FROM_WS=78, ID_PATTERN=79, PROJECT_LINE_COMMENT=80, + PROJECT_MULTILINE_COMMENT=81, PROJECT_WS=82, AS=83, RENAME_LINE_COMMENT=84, + RENAME_MULTILINE_COMMENT=85, RENAME_WS=86, ON=87, WITH=88, ENRICH_POLICY_NAME=89, + ENRICH_LINE_COMMENT=90, ENRICH_MULTILINE_COMMENT=91, ENRICH_WS=92, ENRICH_FIELD_LINE_COMMENT=93, + ENRICH_FIELD_MULTILINE_COMMENT=94, ENRICH_FIELD_WS=95, MVEXPAND_LINE_COMMENT=96, + MVEXPAND_MULTILINE_COMMENT=97, MVEXPAND_WS=98, INFO=99, SHOW_LINE_COMMENT=100, + SHOW_MULTILINE_COMMENT=101, SHOW_WS=102, FUNCTIONS=103, META_LINE_COMMENT=104, + META_MULTILINE_COMMENT=105, META_WS=106, COLON=107, SETTING=108, SETTING_LINE_COMMENT=109, + SETTTING_MULTILINE_COMMENT=110, SETTING_WS=111, METRICS_LINE_COMMENT=112, + METRICS_MULTILINE_COMMENT=113, METRICS_WS=114, CLOSING_METRICS_LINE_COMMENT=115, + CLOSING_METRICS_MULTILINE_COMMENT=116, CLOSING_METRICS_WS=117; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, - SETTING_MODE=11; + SETTING_MODE=11, METRICS_MODE=12, CLOSING_METRICS_MODE=13; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -49,15 +51,16 @@ public class EsqlBaseLexer extends Lexer { public static String[] modeNames = { "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "META_MODE", "SETTING_MODE" + "META_MODE", "SETTING_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS", - "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", - "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", "ROW", "SHOW", + "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "WS", "INDEX_UNQUOTED_IDENTIFIER_PART", "INDEX_UNQUOTED_IDENTIFIER", + "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", 
"EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", @@ -69,23 +72,27 @@ private static String[] makeRuleNames() { "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", - "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", - "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", - "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", - "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", - "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", - "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", - "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", + "FROM_INDEX_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", + "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", + "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", + "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", + "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", + "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", + "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_PIPE", "METRICS_INDEX_UNQUOTED_IDENTIFIER", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", 
"CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } public static final String[] ruleNames = makeRuleNames(); @@ -93,15 +100,15 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'options'", "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'metrics'", + "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", + null, null, null, null, null, null, null, null, "'|'", null, null, null, + "'by'", "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", + "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + null, "']'", null, null, null, null, null, "'options'", "'metadata'", + null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" }; @@ -110,25 +117,28 @@ private static String[] makeLiteralNames() { private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", + "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + 
"COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "OPTIONS", "METADATA", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -191,185 +201,204 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000n\u04b3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000u\u051b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ - "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ - "F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ - "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ - "P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ - "U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ - 
"Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002"+ - "_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002"+ - "d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002"+ - "i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002"+ - "n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002"+ - "s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002"+ - "x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002"+ - "}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080"+ - "\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083"+ - "\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086"+ - "\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089"+ - "\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c"+ - "\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f"+ - "\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092"+ - "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ - "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ - "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ - "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0002\u009f\u0007\u009f\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"+ + "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"+ + "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"+ + "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"+ + "\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002"+ + "\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002"+ + "\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002"+ + "\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002"+ + "\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002"+ + "\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002"+ + "\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002"+ + " \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002"+ + 
"%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002"+ + "*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002"+ + "/\u0007/\u00020\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u0002"+ + "4\u00074\u00025\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u0002"+ + "9\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002"+ + ">\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002"+ + "C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002"+ + "H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002"+ + "M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002"+ + "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002"+ + "W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002"+ + "\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002"+ + "a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002"+ + "f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002"+ + "k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002"+ + "p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002"+ + "u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002"+ + "z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002"+ + "\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002"+ + "\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002"+ + "\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002"+ + "\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002"+ + "\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002"+ + "\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002\u0090\u0007\u0090\u0002"+ + "\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002\u0093\u0007\u0093\u0002"+ + "\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002\u0096\u0007\u0096\u0002"+ + "\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002\u0099\u0007\u0099\u0002"+ + "\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002\u009c\u0007\u009c\u0002"+ + "\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002\u009f\u0007\u009f\u0002"+ + "\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002\u00a2\u0007\u00a2\u0002"+ + "\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002\u00a5\u0007\u00a5\u0002"+ + "\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002\u00a8\u0007\u00a8\u0002"+ + "\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002\u00ab\u0007\u00ab\u0002"+ + "\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ + 
"\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01e4\b\u0012\u000b\u0012\f"+ - "\u0012\u01e5\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0005\u0013\u01ee\b\u0013\n\u0013\f\u0013\u01f1\t\u0013\u0001"+ - "\u0013\u0003\u0013\u01f4\b\u0013\u0001\u0013\u0003\u0013\u01f7\b\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u0200\b\u0014\n\u0014\f\u0014\u0203\t\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ - "\u0015\u020b\b\u0015\u000b\u0015\f\u0015\u020c\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0003 \u0236\b \u0001 \u0004 \u0239\b \u000b \f \u023a\u0001!\u0001"+ - "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0244\b#\u0001$\u0001$\u0001"+ - "%\u0001%\u0001%\u0003%\u024b\b%\u0001&\u0001&\u0001&\u0005&\u0250\b&\n"+ - "&\f&\u0253\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u025b\b"+ - "&\n&\f&\u025e\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0265\b&\u0001"+ - "&\u0003&\u0268\b&\u0003&\u026a\b&\u0001\'\u0004\'\u026d\b\'\u000b\'\f"+ - "\'\u026e\u0001(\u0004(\u0272\b(\u000b(\f(\u0273\u0001(\u0001(\u0005(\u0278"+ - "\b(\n(\f(\u027b\t(\u0001(\u0001(\u0004(\u027f\b(\u000b(\f(\u0280\u0001"+ - "(\u0004(\u0284\b(\u000b(\f(\u0285\u0001(\u0001(\u0005(\u028a\b(\n(\f("+ - "\u028d\t(\u0003(\u028f\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0295\b("+ - "\u000b(\f(\u0296\u0001(\u0001(\u0003(\u029b\b(\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "2\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u0001"+ - "4\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - 
"J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005N\u031e\bN\nN\fN\u0321\tN\u0001"+ - "N\u0001N\u0003N\u0325\bN\u0001N\u0004N\u0328\bN\u000bN\fN\u0329\u0003"+ - "N\u032c\bN\u0001O\u0001O\u0004O\u0330\bO\u000bO\fO\u0331\u0001O\u0001"+ - "O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ - "[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003"+ - "\\\u0371\b\\\u0001]\u0004]\u0374\b]\u000b]\f]\u0375\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0003e\u0399\be\u0001f\u0001f\u0003f\u039d\bf\u0001f\u0005f\u03a0\b"+ - "f\nf\ff\u03a3\tf\u0001f\u0001f\u0003f\u03a7\bf\u0001f\u0004f\u03aa\bf"+ - "\u000bf\ff\u03ab\u0003f\u03ae\bf\u0001g\u0001g\u0004g\u03b2\bg\u000bg"+ - "\fg\u03b3\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ - "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ - "q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001"+ - "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0013\u0004\u0013\u020c\b\u0013\u000b\u0013\f\u0013\u020d\u0001\u0013"+ + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u0216\b\u0014\n\u0014\f\u0014\u0219\t\u0014\u0001\u0014\u0003\u0014\u021c"+ + "\b\u0014\u0001\u0014\u0003\u0014\u021f\b\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015"+ + "\u0228\b\u0015\n\u0015\f\u0015\u022b\t\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0233\b\u0016\u000b"+ + "\u0016\f\u0016\u0234\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0003\u0017\u023c\b\u0017\u0001\u0018\u0004\u0018\u023f\b\u0018"+ + "\u000b\u0018\f\u0018\u0240\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ + "\u0001 \u0001 \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0003"+ + 
"#\u0268\b#\u0001#\u0004#\u026b\b#\u000b#\f#\u026c\u0001$\u0001$\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0003&\u0276\b&\u0001\'\u0001\'\u0001(\u0001"+ + "(\u0001(\u0003(\u027d\b(\u0001)\u0001)\u0001)\u0005)\u0282\b)\n)\f)\u0285"+ + "\t)\u0001)\u0001)\u0001)\u0001)\u0001)\u0001)\u0005)\u028d\b)\n)\f)\u0290"+ + "\t)\u0001)\u0001)\u0001)\u0001)\u0001)\u0003)\u0297\b)\u0001)\u0003)\u029a"+ + "\b)\u0003)\u029c\b)\u0001*\u0004*\u029f\b*\u000b*\f*\u02a0\u0001+\u0004"+ + "+\u02a4\b+\u000b+\f+\u02a5\u0001+\u0001+\u0005+\u02aa\b+\n+\f+\u02ad\t"+ + "+\u0001+\u0001+\u0004+\u02b1\b+\u000b+\f+\u02b2\u0001+\u0004+\u02b6\b"+ + "+\u000b+\f+\u02b7\u0001+\u0001+\u0005+\u02bc\b+\n+\f+\u02bf\t+\u0003+"+ + "\u02c1\b+\u0001+\u0001+\u0001+\u0001+\u0004+\u02c7\b+\u000b+\f+\u02c8"+ + "\u0001+\u0001+\u0003+\u02cd\b+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u0001"+ + "4\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u00015\u0001"+ + "5\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00018\u0001"+ + "8\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ + "=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ + "@\u0001@\u0001@\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001"+ + "B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ + "I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ + "P\u0001P\u0001Q\u0001Q\u0005Q\u0350\bQ\nQ\fQ\u0353\tQ\u0001Q\u0001Q\u0003"+ + "Q\u0357\bQ\u0001Q\u0004Q\u035a\bQ\u000bQ\fQ\u035b\u0003Q\u035e\bQ\u0001"+ + "R\u0001R\u0004R\u0362\bR\u000bR\fR\u0363\u0001R\u0001R\u0001S\u0001S\u0001"+ + "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001"+ + "V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001"+ + "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001"+ + "]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ + "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001"+ + "e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0003f\u03c1\bf\u0001g\u0001"+ + "g\u0003g\u03c5\bg\u0001g\u0005g\u03c8\bg\ng\fg\u03cb\tg\u0001g\u0001g"+ + "\u0003g\u03cf\bg\u0001g\u0004g\u03d2\bg\u000bg\fg\u03d3\u0003g\u03d6\b"+ + "g\u0001h\u0001h\u0004h\u03da\bh\u000bh\fh\u03db\u0001i\u0001i\u0001i\u0001"+ + "i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ + "l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001"+ + "n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001q\u0001"+ + "q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001"+ + "s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001y\u0004y\u03ff\by\u000by\fy\u0400\u0001y\u0001"+ - "y\u0003y\u0405\by\u0001y\u0004y\u0408\by\u000by\fy\u0409\u0001z\u0001"+ - 
"z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f"+ - "\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0004\u009c"+ - "\u04a4\b\u009c\u000b\u009c\f\u009c\u04a5\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0002\u0201\u025c\u0000\u00a0"+ - "\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018"+ - "\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011"+ - ".\u00120\u00132\u00144\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019"+ - "B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000"+ - "V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+"+ - "z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6\u00907"+ - "\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4"+ - "A\u00a6B\u00a8C\u00aa\u0000\u00acD\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6"+ - "\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4"+ - "\u0000\u00c6J\u00c8\u0000\u00caK\u00ccL\u00ceM\u00d0\u0000\u00d2\u0000"+ - "\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00daN\u00dcO\u00deP\u00e0Q\u00e2"+ - "\u0000\u00e4\u0000\u00e6\u0000\u00e8\u0000\u00eaR\u00ec\u0000\u00eeS\u00f0"+ - "T\u00f2U\u00f4\u0000\u00f6\u0000\u00f8V\u00faW\u00fc\u0000\u00feX\u0100"+ - "\u0000\u0102\u0000\u0104Y\u0106Z\u0108[\u010a\u0000\u010c\u0000\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116\u0000\u0118\\\u011a]"+ - "\u011c^\u011e\u0000\u0120\u0000\u0122\u0000\u0124\u0000\u0126_\u0128`"+ - "\u012aa\u012c\u0000\u012eb\u0130c\u0132d\u0134e\u0136\u0000\u0138f\u013a"+ - "g\u013ch\u013ei\u0140\u0000\u0142j\u0144k\u0146l\u0148m\u014an\f\u0000"+ - "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t"+ 
- "\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u00000"+ - "9\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ - "\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,/"+ - "/==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04ce"+ - "\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ + "x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001z\u0004"+ + "z\u0427\bz\u000bz\fz\u0428\u0001z\u0001z\u0003z\u042d\bz\u0001z\u0004"+ + "z\u0430\bz\u000bz\fz\u0431\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ + "|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001"+ + "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0004\u009d\u04cc\b\u009d\u000b"+ + "\u009d\f\u009d\u04cd\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001"+ + "\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001"+ + "\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ + "\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0002"+ + 
"\u0229\u028e\u0000\u00ae\u000e\u0001\u0010\u0002\u0012\u0003\u0014\u0004"+ + "\u0016\u0005\u0018\u0006\u001a\u0007\u001c\b\u001e\t \n\"\u000b$\f&\r"+ + "(\u000e*\u000f,\u0010.\u00110\u00122\u00134\u00146\u00158\u0016:\u0017"+ + "<\u0000>\u0018@\u0000B\u0000D\u0019F\u001aH\u001bJ\u001cL\u0000N\u0000"+ + "P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000`\u001db\u001e"+ + "d\u001ff h!j\"l#n$p%r&t\'v(x)z*|+~,\u0080-\u0082.\u0084/\u00860\u0088"+ + "1\u008a2\u008c3\u008e4\u00905\u00926\u00947\u00968\u00989\u009a:\u009c"+ + ";\u009e<\u00a0=\u00a2>\u00a4?\u00a6@\u00a8A\u00aaB\u00acC\u00aeD\u00b0"+ + "E\u00b2\u0000\u00b4F\u00b6G\u00b8H\u00baI\u00bc\u0000\u00be\u0000\u00c0"+ + "\u0000\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8J\u00caK\u00cc\u0000\u00ce"+ + "L\u00d0M\u00d2N\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00da\u0000\u00dc"+ + "\u0000\u00deO\u00e0P\u00e2Q\u00e4R\u00e6\u0000\u00e8\u0000\u00ea\u0000"+ + "\u00ec\u0000\u00eeS\u00f0\u0000\u00f2T\u00f4U\u00f6V\u00f8\u0000\u00fa"+ + "\u0000\u00fcW\u00feX\u0100\u0000\u0102Y\u0104\u0000\u0106\u0000\u0108"+ + "Z\u010a[\u010c\\\u010e\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116"+ + "\u0000\u0118\u0000\u011a\u0000\u011c]\u011e^\u0120_\u0122\u0000\u0124"+ + "\u0000\u0126\u0000\u0128\u0000\u012a`\u012ca\u012eb\u0130\u0000\u0132"+ + "c\u0134d\u0136e\u0138f\u013a\u0000\u013cg\u013eh\u0140i\u0142j\u0144\u0000"+ + "\u0146k\u0148l\u014am\u014cn\u014eo\u0150\u0000\u0152\u0000\u0154p\u0156"+ + "q\u0158r\u015a\u0000\u015cs\u015et\u0160u\u0162\u0000\u0164\u0000\u0166"+ + "\u0000\u0168\u0000\u000e\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003"+ + "\u0000\t\n\r\r \n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0001"+ + "\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r"+ + "\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\u000b\u0000\t\n"+ + "\r\r \"#,,//::<<>?\\\\||\u0534\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ @@ -379,575 +408,625 @@ public EsqlBaseLexer(CharStream input) { "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ - "\u0000\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001"+ - "<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001"+ - "\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000"+ - "\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000"+ - "\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b"+ - "\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000"+ - "\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000"+ - "\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p"+ - "\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000"+ - "\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000"+ - "\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~"+ - "\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082"+ - "\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086"+ - 
"\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a"+ - "\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e"+ - "\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092"+ - "\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096"+ - "\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a"+ - "\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e"+ - "\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2"+ - "\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6"+ - "\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000\u0000\u0002\u00ac"+ - "\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0"+ - "\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4"+ - "\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8"+ - "\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc"+ - "\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000\u0000\u0003\u00c0"+ - "\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003\u00c6"+ - "\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca"+ - "\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce"+ - "\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000\u0000\u0004\u00d2"+ - "\u0001\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00da"+ - "\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de"+ - "\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2"+ - "\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6"+ - "\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea"+ - "\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee"+ - "\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2"+ - "\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6"+ - "\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa"+ - "\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100"+ - "\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104"+ - "\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108"+ - "\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c"+ - "\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110"+ - "\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114"+ - "\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118"+ - "\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c"+ - "\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001"+ + "\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000"+ + ">\u0001\u0000\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0001B\u0001"+ + "\u0000\u0000\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000"+ + "\u0000\u0001H\u0001\u0000\u0000\u0000\u0002J\u0001\u0000\u0000\u0000\u0002"+ + "`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001"+ + "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ + "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ + "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ + "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ + 
"\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ + "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ + "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ + "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ + "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ + "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ + "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ + "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ + "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ + "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ + "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ + "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ + "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ + "\u0000\u0000\u0000\u0002\u00b4\u0001\u0000\u0000\u0000\u0002\u00b6\u0001"+ + "\u0000\u0000\u0000\u0002\u00b8\u0001\u0000\u0000\u0000\u0002\u00ba\u0001"+ + "\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001"+ + "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ + "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001"+ + "\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001"+ + "\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001"+ + "\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00d6\u0001"+ + "\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00de\u0001"+ + "\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0004\u00e2\u0001"+ + "\u0000\u0000\u0000\u0004\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6\u0001"+ + "\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001"+ + "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001"+ + "\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001"+ + "\u0000\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001"+ + "\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa\u0001"+ + "\u0000\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001"+ + "\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104\u0001"+ + "\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108\u0001"+ + "\u0000\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001"+ + "\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110\u0001"+ + "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ + "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001"+ + "\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001"+ + "\u0000\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001"+ "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ - "\u0000\b\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000"+ - "\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ - "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001"+ - 
"\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ + "\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000\u0000"+ + "\b\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ + "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\t\u0136\u0001"+ + "\u0000\u0000\u0000\t\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ "\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ - "\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000"+ - "\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000"+ - "\u0000\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000"+ - "\u0000\f\u014c\u0001\u0000\u0000\u0000\u000e\u0156\u0001\u0000\u0000\u0000"+ - "\u0010\u015d\u0001\u0000\u0000\u0000\u0012\u0166\u0001\u0000\u0000\u0000"+ - "\u0014\u016d\u0001\u0000\u0000\u0000\u0016\u0177\u0001\u0000\u0000\u0000"+ - "\u0018\u017e\u0001\u0000\u0000\u0000\u001a\u0185\u0001\u0000\u0000\u0000"+ - "\u001c\u0193\u0001\u0000\u0000\u0000\u001e\u019a\u0001\u0000\u0000\u0000"+ - " \u01a2\u0001\u0000\u0000\u0000\"\u01a9\u0001\u0000\u0000\u0000$\u01b5"+ - "\u0001\u0000\u0000\u0000&\u01be\u0001\u0000\u0000\u0000(\u01c4\u0001\u0000"+ - "\u0000\u0000*\u01cb\u0001\u0000\u0000\u0000,\u01d2\u0001\u0000\u0000\u0000"+ - ".\u01da\u0001\u0000\u0000\u00000\u01e3\u0001\u0000\u0000\u00002\u01e9"+ - "\u0001\u0000\u0000\u00004\u01fa\u0001\u0000\u0000\u00006\u020a\u0001\u0000"+ - "\u0000\u00008\u0210\u0001\u0000\u0000\u0000:\u0215\u0001\u0000\u0000\u0000"+ - "<\u021a\u0001\u0000\u0000\u0000>\u021e\u0001\u0000\u0000\u0000@\u0222"+ - "\u0001\u0000\u0000\u0000B\u0226\u0001\u0000\u0000\u0000D\u022a\u0001\u0000"+ - "\u0000\u0000F\u022c\u0001\u0000\u0000\u0000H\u022e\u0001\u0000\u0000\u0000"+ - "J\u0231\u0001\u0000\u0000\u0000L\u0233\u0001\u0000\u0000\u0000N\u023c"+ - "\u0001\u0000\u0000\u0000P\u023e\u0001\u0000\u0000\u0000R\u0243\u0001\u0000"+ - "\u0000\u0000T\u0245\u0001\u0000\u0000\u0000V\u024a\u0001\u0000\u0000\u0000"+ - "X\u0269\u0001\u0000\u0000\u0000Z\u026c\u0001\u0000\u0000\u0000\\\u029a"+ - "\u0001\u0000\u0000\u0000^\u029c\u0001\u0000\u0000\u0000`\u029f\u0001\u0000"+ - "\u0000\u0000b\u02a3\u0001\u0000\u0000\u0000d\u02a7\u0001\u0000\u0000\u0000"+ - "f\u02a9\u0001\u0000\u0000\u0000h\u02ac\u0001\u0000\u0000\u0000j\u02ae"+ - "\u0001\u0000\u0000\u0000l\u02b3\u0001\u0000\u0000\u0000n\u02b5\u0001\u0000"+ - "\u0000\u0000p\u02bb\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ - "t\u02c6\u0001\u0000\u0000\u0000v\u02c8\u0001\u0000\u0000\u0000x\u02cb"+ - "\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d3\u0001\u0000"+ - "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dc\u0001\u0000\u0000"+ - "\u0000\u0082\u02e2\u0001\u0000\u0000\u0000\u0084\u02e5\u0001\u0000\u0000"+ - "\u0000\u0086\u02e7\u0001\u0000\u0000\u0000\u0088\u02ed\u0001\u0000\u0000"+ - "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f4\u0001\u0000\u0000"+ - "\u0000\u008e\u02f7\u0001\u0000\u0000\u0000\u0090\u02fa\u0001\u0000\u0000"+ - "\u0000\u0092\u02fd\u0001\u0000\u0000\u0000\u0094\u02ff\u0001\u0000\u0000"+ - "\u0000\u0096\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000"+ - "\u0000\u009a\u0307\u0001\u0000\u0000\u0000\u009c\u0309\u0001\u0000\u0000"+ - "\u0000\u009e\u030b\u0001\u0000\u0000\u0000\u00a0\u030d\u0001\u0000\u0000"+ - "\u0000\u00a2\u030f\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ - "\u0000\u00a6\u0316\u0001\u0000\u0000\u0000\u00a8\u032b\u0001\u0000\u0000"+ - 
"\u0000\u00aa\u032d\u0001\u0000\u0000\u0000\u00ac\u0335\u0001\u0000\u0000"+ - "\u0000\u00ae\u0337\u0001\u0000\u0000\u0000\u00b0\u033b\u0001\u0000\u0000"+ - "\u0000\u00b2\u033f\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ - "\u0000\u00b6\u0348\u0001\u0000\u0000\u0000\u00b8\u034c\u0001\u0000\u0000"+ - "\u0000\u00ba\u0350\u0001\u0000\u0000\u0000\u00bc\u0354\u0001\u0000\u0000"+ - "\u0000\u00be\u0358\u0001\u0000\u0000\u0000\u00c0\u035c\u0001\u0000\u0000"+ - "\u0000\u00c2\u0364\u0001\u0000\u0000\u0000\u00c4\u0370\u0001\u0000\u0000"+ - "\u0000\u00c6\u0373\u0001\u0000\u0000\u0000\u00c8\u0377\u0001\u0000\u0000"+ - "\u0000\u00ca\u037b\u0001\u0000\u0000\u0000\u00cc\u037f\u0001\u0000\u0000"+ - "\u0000\u00ce\u0383\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ - "\u0000\u00d2\u038c\u0001\u0000\u0000\u0000\u00d4\u0390\u0001\u0000\u0000"+ - "\u0000\u00d6\u0398\u0001\u0000\u0000\u0000\u00d8\u03ad\u0001\u0000\u0000"+ - "\u0000\u00da\u03b1\u0001\u0000\u0000\u0000\u00dc\u03b5\u0001\u0000\u0000"+ - "\u0000\u00de\u03b9\u0001\u0000\u0000\u0000\u00e0\u03bd\u0001\u0000\u0000"+ - "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c6\u0001\u0000\u0000"+ - "\u0000\u00e6\u03ca\u0001\u0000\u0000\u0000\u00e8\u03ce\u0001\u0000\u0000"+ - "\u0000\u00ea\u03d2\u0001\u0000\u0000\u0000\u00ec\u03d5\u0001\u0000\u0000"+ - "\u0000\u00ee\u03d9\u0001\u0000\u0000\u0000\u00f0\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f2\u03e1\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ - "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ - "\u0000\u00fa\u03f4\u0001\u0000\u0000\u0000\u00fc\u03fb\u0001\u0000\u0000"+ - "\u0000\u00fe\u0404\u0001\u0000\u0000\u0000\u0100\u040b\u0001\u0000\u0000"+ - "\u0000\u0102\u040f\u0001\u0000\u0000\u0000\u0104\u0413\u0001\u0000\u0000"+ - "\u0000\u0106\u0417\u0001\u0000\u0000\u0000\u0108\u041b\u0001\u0000\u0000"+ - "\u0000\u010a\u041f\u0001\u0000\u0000\u0000\u010c\u0425\u0001\u0000\u0000"+ - "\u0000\u010e\u0429\u0001\u0000\u0000\u0000\u0110\u042d\u0001\u0000\u0000"+ - "\u0000\u0112\u0431\u0001\u0000\u0000\u0000\u0114\u0435\u0001\u0000\u0000"+ - "\u0000\u0116\u0439\u0001\u0000\u0000\u0000\u0118\u043d\u0001\u0000\u0000"+ - "\u0000\u011a\u0441\u0001\u0000\u0000\u0000\u011c\u0445\u0001\u0000\u0000"+ - "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044e\u0001\u0000\u0000"+ - "\u0000\u0122\u0452\u0001\u0000\u0000\u0000\u0124\u0456\u0001\u0000\u0000"+ - "\u0000\u0126\u045a\u0001\u0000\u0000\u0000\u0128\u045e\u0001\u0000\u0000"+ - "\u0000\u012a\u0462\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ - "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u0470\u0001\u0000\u0000"+ - "\u0000\u0132\u0474\u0001\u0000\u0000\u0000\u0134\u0478\u0001\u0000\u0000"+ - "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0481\u0001\u0000\u0000"+ - "\u0000\u013a\u048b\u0001\u0000\u0000\u0000\u013c\u048f\u0001\u0000\u0000"+ - "\u0000\u013e\u0493\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ - "\u0000\u0142\u049c\u0001\u0000\u0000\u0000\u0144\u04a3\u0001\u0000\u0000"+ - "\u0000\u0146\u04a7\u0001\u0000\u0000\u0000\u0148\u04ab\u0001\u0000\u0000"+ - "\u0000\u014a\u04af\u0001\u0000\u0000\u0000\u014c\u014d\u0005d\u0000\u0000"+ - "\u014d\u014e\u0005i\u0000\u0000\u014e\u014f\u0005s\u0000\u0000\u014f\u0150"+ - "\u0005s\u0000\u0000\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005c"+ - "\u0000\u0000\u0152\u0153\u0005t\u0000\u0000\u0153\u0154\u0001\u0000\u0000"+ - "\u0000\u0154\u0155\u0006\u0000\u0000\u0000\u0155\r\u0001\u0000\u0000\u0000"+ - 
"\u0156\u0157\u0005d\u0000\u0000\u0157\u0158\u0005r\u0000\u0000\u0158\u0159"+ - "\u0005o\u0000\u0000\u0159\u015a\u0005p\u0000\u0000\u015a\u015b\u0001\u0000"+ - "\u0000\u0000\u015b\u015c\u0006\u0001\u0001\u0000\u015c\u000f\u0001\u0000"+ - "\u0000\u0000\u015d\u015e\u0005e\u0000\u0000\u015e\u015f\u0005n\u0000\u0000"+ - "\u015f\u0160\u0005r\u0000\u0000\u0160\u0161\u0005i\u0000\u0000\u0161\u0162"+ - "\u0005c\u0000\u0000\u0162\u0163\u0005h\u0000\u0000\u0163\u0164\u0001\u0000"+ - "\u0000\u0000\u0164\u0165\u0006\u0002\u0002\u0000\u0165\u0011\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005v\u0000\u0000"+ - "\u0168\u0169\u0005a\u0000\u0000\u0169\u016a\u0005l\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0006\u0003\u0000\u0000\u016c\u0013"+ - "\u0001\u0000\u0000\u0000\u016d\u016e\u0005e\u0000\u0000\u016e\u016f\u0005"+ - "x\u0000\u0000\u016f\u0170\u0005p\u0000\u0000\u0170\u0171\u0005l\u0000"+ - "\u0000\u0171\u0172\u0005a\u0000\u0000\u0172\u0173\u0005i\u0000\u0000\u0173"+ - "\u0174\u0005n\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0176"+ - "\u0006\u0004\u0003\u0000\u0176\u0015\u0001\u0000\u0000\u0000\u0177\u0178"+ - "\u0005f\u0000\u0000\u0178\u0179\u0005r\u0000\u0000\u0179\u017a\u0005o"+ - "\u0000\u0000\u017a\u017b\u0005m\u0000\u0000\u017b\u017c\u0001\u0000\u0000"+ - "\u0000\u017c\u017d\u0006\u0005\u0004\u0000\u017d\u0017\u0001\u0000\u0000"+ - "\u0000\u017e\u017f\u0005g\u0000\u0000\u017f\u0180\u0005r\u0000\u0000\u0180"+ - "\u0181\u0005o\u0000\u0000\u0181\u0182\u0005k\u0000\u0000\u0182\u0183\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0006\u0006\u0000\u0000\u0184\u0019\u0001"+ - "\u0000\u0000\u0000\u0185\u0186\u0005i\u0000\u0000\u0186\u0187\u0005n\u0000"+ - "\u0000\u0187\u0188\u0005l\u0000\u0000\u0188\u0189\u0005i\u0000\u0000\u0189"+ - "\u018a\u0005n\u0000\u0000\u018a\u018b\u0005e\u0000\u0000\u018b\u018c\u0005"+ - "s\u0000\u0000\u018c\u018d\u0005t\u0000\u0000\u018d\u018e\u0005a\u0000"+ - "\u0000\u018e\u018f\u0005t\u0000\u0000\u018f\u0190\u0005s\u0000\u0000\u0190"+ - "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\u0007\u0000\u0000\u0192"+ - "\u001b\u0001\u0000\u0000\u0000\u0193\u0194\u0005k\u0000\u0000\u0194\u0195"+ - "\u0005e\u0000\u0000\u0195\u0196\u0005e\u0000\u0000\u0196\u0197\u0005p"+ - "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198\u0199\u0006\b\u0001"+ - "\u0000\u0199\u001d\u0001\u0000\u0000\u0000\u019a\u019b\u0005l\u0000\u0000"+ - "\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005m\u0000\u0000\u019d\u019e"+ - "\u0005i\u0000\u0000\u019e\u019f\u0005t\u0000\u0000\u019f\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a0\u01a1\u0006\t\u0000\u0000\u01a1\u001f\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a3\u0005m\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4"+ - "\u01a5\u0005t\u0000\u0000\u01a5\u01a6\u0005a\u0000\u0000\u01a6\u01a7\u0001"+ - "\u0000\u0000\u0000\u01a7\u01a8\u0006\n\u0005\u0000\u01a8!\u0001\u0000"+ - "\u0000\u0000\u01a9\u01aa\u0005m\u0000\u0000\u01aa\u01ab\u0005v\u0000\u0000"+ - "\u01ab\u01ac\u0005_\u0000\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae"+ - "\u0005x\u0000\u0000\u01ae\u01af\u0005p\u0000\u0000\u01af\u01b0\u0005a"+ - "\u0000\u0000\u01b0\u01b1\u0005n\u0000\u0000\u01b1\u01b2\u0005d\u0000\u0000"+ - "\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u000b\u0006\u0000"+ - "\u01b4#\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005r\u0000\u0000\u01b6\u01b7"+ - "\u0005e\u0000\u0000\u01b7\u01b8\u0005n\u0000\u0000\u01b8\u01b9\u0005a"+ - "\u0000\u0000\u01b9\u01ba\u0005m\u0000\u0000\u01ba\u01bb\u0005e\u0000\u0000"+ - 
"\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\f\u0007\u0000\u01bd"+ - "%\u0001\u0000\u0000\u0000\u01be\u01bf\u0005r\u0000\u0000\u01bf\u01c0\u0005"+ - "o\u0000\u0000\u01c0\u01c1\u0005w\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c2\u01c3\u0006\r\u0000\u0000\u01c3\'\u0001\u0000\u0000\u0000"+ - "\u01c4\u01c5\u0005s\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6\u01c7"+ - "\u0005o\u0000\u0000\u01c7\u01c8\u0005w\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0006\u000e\b\u0000\u01ca)\u0001\u0000\u0000"+ - "\u0000\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005o\u0000\u0000\u01cd"+ - "\u01ce\u0005r\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0006\u000f\u0000\u0000\u01d1+\u0001\u0000"+ - "\u0000\u0000\u01d2\u01d3\u0005s\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000"+ - "\u01d4\u01d5\u0005a\u0000\u0000\u01d5\u01d6\u0005t\u0000\u0000\u01d6\u01d7"+ - "\u0005s\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0006"+ - "\u0010\u0000\u0000\u01d9-\u0001\u0000\u0000\u0000\u01da\u01db\u0005w\u0000"+ - "\u0000\u01db\u01dc\u0005h\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd"+ - "\u01de\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0001"+ - "\u0000\u0000\u0000\u01e0\u01e1\u0006\u0011\u0000\u0000\u01e1/\u0001\u0000"+ - "\u0000\u0000\u01e2\u01e4\b\u0000\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000"+ - "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000"+ - "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ - "\u0000\u01e7\u01e8\u0006\u0012\u0000\u0000\u01e81\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0005/\u0000\u0000\u01ea\u01eb\u0005/\u0000\u0000\u01eb\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ec\u01ee\b\u0001\u0000\u0000\u01ed\u01ec\u0001"+ - "\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001"+ - "\u0000\u0000\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0005"+ - "\r\u0000\u0000\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000"+ - "\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f7\u0005\n\u0000"+ - "\u0000\u01f6\u01f5\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u0013\t\u0000"+ - "\u01f93\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005/\u0000\u0000\u01fb\u01fc"+ - "\u0005*\u0000\u0000\u01fc\u0201\u0001\u0000\u0000\u0000\u01fd\u0200\u0003"+ - "4\u0014\u0000\u01fe\u0200\t\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000"+ - "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000"+ - "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ - "\u0000\u0000\u0202\u0204\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000"+ - "\u0000\u0000\u0204\u0205\u0005*\u0000\u0000\u0205\u0206\u0005/\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0006\u0014\t\u0000\u0208"+ - "5\u0001\u0000\u0000\u0000\u0209\u020b\u0007\u0002\u0000\u0000\u020a\u0209"+ - "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020a"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\u0015\t\u0000\u020f7\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0003\u00a4L\u0000\u0211\u0212\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0006\u0016\n\u0000\u0213\u0214\u0006\u0016\u000b"+ - "\u0000\u02149\u0001\u0000\u0000\u0000\u0215\u0216\u0003B\u001b\u0000\u0216"+ - 
"\u0217\u0001\u0000\u0000\u0000\u0217\u0218\u0006\u0017\f\u0000\u0218\u0219"+ - "\u0006\u0017\r\u0000\u0219;\u0001\u0000\u0000\u0000\u021a\u021b\u0003"+ - "6\u0015\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021d\u0006\u0018"+ - "\t\u0000\u021d=\u0001\u0000\u0000\u0000\u021e\u021f\u00032\u0013\u0000"+ - "\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0221\u0006\u0019\t\u0000\u0221"+ - "?\u0001\u0000\u0000\u0000\u0222\u0223\u00034\u0014\u0000\u0223\u0224\u0001"+ - "\u0000\u0000\u0000\u0224\u0225\u0006\u001a\t\u0000\u0225A\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0005|\u0000\u0000\u0227\u0228\u0001\u0000\u0000"+ - "\u0000\u0228\u0229\u0006\u001b\r\u0000\u0229C\u0001\u0000\u0000\u0000"+ - "\u022a\u022b\u0007\u0003\u0000\u0000\u022bE\u0001\u0000\u0000\u0000\u022c"+ - "\u022d\u0007\u0004\u0000\u0000\u022dG\u0001\u0000\u0000\u0000\u022e\u022f"+ - "\u0005\\\u0000\u0000\u022f\u0230\u0007\u0005\u0000\u0000\u0230I\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\b\u0006\u0000\u0000\u0232K\u0001\u0000"+ - "\u0000\u0000\u0233\u0235\u0007\u0007\u0000\u0000\u0234\u0236\u0007\b\u0000"+ - "\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000"+ - "\u0000\u0236\u0238\u0001\u0000\u0000\u0000\u0237\u0239\u0003D\u001c\u0000"+ - "\u0238\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ - "\u023a\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000"+ - "\u023bM\u0001\u0000\u0000\u0000\u023c\u023d\u0005@\u0000\u0000\u023dO"+ - "\u0001\u0000\u0000\u0000\u023e\u023f\u0005`\u0000\u0000\u023fQ\u0001\u0000"+ - "\u0000\u0000\u0240\u0244\b\t\u0000\u0000\u0241\u0242\u0005`\u0000\u0000"+ - "\u0242\u0244\u0005`\u0000\u0000\u0243\u0240\u0001\u0000\u0000\u0000\u0243"+ - "\u0241\u0001\u0000\u0000\u0000\u0244S\u0001\u0000\u0000\u0000\u0245\u0246"+ - "\u0005_\u0000\u0000\u0246U\u0001\u0000\u0000\u0000\u0247\u024b\u0003F"+ - "\u001d\u0000\u0248\u024b\u0003D\u001c\u0000\u0249\u024b\u0003T$\u0000"+ - "\u024a\u0247\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000"+ - "\u024a\u0249\u0001\u0000\u0000\u0000\u024bW\u0001\u0000\u0000\u0000\u024c"+ - "\u0251\u0005\"\u0000\u0000\u024d\u0250\u0003H\u001e\u0000\u024e\u0250"+ - "\u0003J\u001f\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u024f\u024e\u0001"+ - "\u0000\u0000\u0000\u0250\u0253\u0001\u0000\u0000\u0000\u0251\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0254\u0001"+ - "\u0000\u0000\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0254\u026a\u0005"+ - "\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000"+ - "\u0000\u0257\u0258\u0005\"\u0000\u0000\u0258\u025c\u0001\u0000\u0000\u0000"+ - "\u0259\u025b\b\u0001\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b"+ - "\u025e\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c"+ - "\u025a\u0001\u0000\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e"+ - "\u025c\u0001\u0000\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0261"+ - "\u0005\"\u0000\u0000\u0261\u0262\u0005\"\u0000\u0000\u0262\u0264\u0001"+ - "\u0000\u0000\u0000\u0263\u0265\u0005\"\u0000\u0000\u0264\u0263\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0267\u0001\u0000"+ - "\u0000\u0000\u0266\u0268\u0005\"\u0000\u0000\u0267\u0266\u0001\u0000\u0000"+ - "\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000"+ - "\u0000\u0269\u024c\u0001\u0000\u0000\u0000\u0269\u0255\u0001\u0000\u0000"+ - "\u0000\u026aY\u0001\u0000\u0000\u0000\u026b\u026d\u0003D\u001c\u0000\u026c"+ - 
"\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ - "\u026c\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ - "[\u0001\u0000\u0000\u0000\u0270\u0272\u0003D\u001c\u0000\u0271\u0270\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0279\u0003l0\u0000\u0276\u0278\u0003D\u001c"+ - "\u0000\u0277\u0276\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000"+ - "\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000"+ - "\u0000\u027a\u029b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000"+ - "\u0000\u027c\u027e\u0003l0\u0000\u027d\u027f\u0003D\u001c\u0000\u027e"+ - "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ - "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ - "\u029b\u0001\u0000\u0000\u0000\u0282\u0284\u0003D\u001c\u0000\u0283\u0282"+ - "\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0283"+ - "\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u028e"+ - "\u0001\u0000\u0000\u0000\u0287\u028b\u0003l0\u0000\u0288\u028a\u0003D"+ - "\u001c\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028d\u0001\u0000"+ - "\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000"+ - "\u0000\u0000\u028c\u028f\u0001\u0000\u0000\u0000\u028d\u028b\u0001\u0000"+ - "\u0000\u0000\u028e\u0287\u0001\u0000\u0000\u0000\u028e\u028f\u0001\u0000"+ - "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0003L \u0000"+ - "\u0291\u029b\u0001\u0000\u0000\u0000\u0292\u0294\u0003l0\u0000\u0293\u0295"+ - "\u0003D\u001c\u0000\u0294\u0293\u0001\u0000\u0000\u0000\u0295\u0296\u0001"+ - "\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000\u0296\u0297\u0001"+ - "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u0299\u0003"+ - "L \u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0271\u0001\u0000\u0000"+ - "\u0000\u029a\u027c\u0001\u0000\u0000\u0000\u029a\u0283\u0001\u0000\u0000"+ - "\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b]\u0001\u0000\u0000\u0000"+ - "\u029c\u029d\u0005b\u0000\u0000\u029d\u029e\u0005y\u0000\u0000\u029e_"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0005a\u0000\u0000\u02a0\u02a1\u0005"+ - "n\u0000\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2a\u0001\u0000\u0000"+ - "\u0000\u02a3\u02a4\u0005a\u0000\u0000\u02a4\u02a5\u0005s\u0000\u0000\u02a5"+ - "\u02a6\u0005c\u0000\u0000\u02a6c\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005"+ - "=\u0000\u0000\u02a8e\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005:\u0000"+ - "\u0000\u02aa\u02ab\u0005:\u0000\u0000\u02abg\u0001\u0000\u0000\u0000\u02ac"+ - "\u02ad\u0005,\u0000\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005"+ - "d\u0000\u0000\u02af\u02b0\u0005e\u0000\u0000\u02b0\u02b1\u0005s\u0000"+ - "\u0000\u02b1\u02b2\u0005c\u0000\u0000\u02b2k\u0001\u0000\u0000\u0000\u02b3"+ - "\u02b4\u0005.\u0000\u0000\u02b4m\u0001\u0000\u0000\u0000\u02b5\u02b6\u0005"+ - "f\u0000\u0000\u02b6\u02b7\u0005a\u0000\u0000\u02b7\u02b8\u0005l\u0000"+ - "\u0000\u02b8\u02b9\u0005s\u0000\u0000\u02b9\u02ba\u0005e\u0000\u0000\u02ba"+ - "o\u0001\u0000\u0000\u0000\u02bb\u02bc\u0005f\u0000\u0000\u02bc\u02bd\u0005"+ - "i\u0000\u0000\u02bd\u02be\u0005r\u0000\u0000\u02be\u02bf\u0005s\u0000"+ - "\u0000\u02bf\u02c0\u0005t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1"+ - "\u02c2\u0005l\u0000\u0000\u02c2\u02c3\u0005a\u0000\u0000\u02c3\u02c4\u0005"+ - 
"s\u0000\u0000\u02c4\u02c5\u0005t\u0000\u0000\u02c5s\u0001\u0000\u0000"+ - "\u0000\u02c6\u02c7\u0005(\u0000\u0000\u02c7u\u0001\u0000\u0000\u0000\u02c8"+ - "\u02c9\u0005i\u0000\u0000\u02c9\u02ca\u0005n\u0000\u0000\u02caw\u0001"+ - "\u0000\u0000\u0000\u02cb\u02cc\u0005i\u0000\u0000\u02cc\u02cd\u0005s\u0000"+ - "\u0000\u02cdy\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005l\u0000\u0000\u02cf"+ - "\u02d0\u0005i\u0000\u0000\u02d0\u02d1\u0005k\u0000\u0000\u02d1\u02d2\u0005"+ - "e\u0000\u0000\u02d2{\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005n\u0000"+ - "\u0000\u02d4\u02d5\u0005o\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6"+ - "}\u0001\u0000\u0000\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005"+ - "u\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000"+ - "\u0000\u02db\u007f\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005n\u0000\u0000"+ - "\u02dd\u02de\u0005u\u0000\u0000\u02de\u02df\u0005l\u0000\u0000\u02df\u02e0"+ - "\u0005l\u0000\u0000\u02e0\u02e1\u0005s\u0000\u0000\u02e1\u0081\u0001\u0000"+ - "\u0000\u0000\u02e2\u02e3\u0005o\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000"+ - "\u02e4\u0083\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005?\u0000\u0000\u02e6"+ - "\u0085\u0001\u0000\u0000\u0000\u02e7\u02e8\u0005r\u0000\u0000\u02e8\u02e9"+ - "\u0005l\u0000\u0000\u02e9\u02ea\u0005i\u0000\u0000\u02ea\u02eb\u0005k"+ - "\u0000\u0000\u02eb\u02ec\u0005e\u0000\u0000\u02ec\u0087\u0001\u0000\u0000"+ - "\u0000\u02ed\u02ee\u0005)\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000"+ - "\u02ef\u02f0\u0005t\u0000\u0000\u02f0\u02f1\u0005r\u0000\u0000\u02f1\u02f2"+ - "\u0005u\u0000\u0000\u02f2\u02f3\u0005e\u0000\u0000\u02f3\u008b\u0001\u0000"+ - "\u0000\u0000\u02f4\u02f5\u0005=\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000"+ - "\u02f6\u008d\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005=\u0000\u0000\u02f8"+ - "\u02f9\u0005~\u0000\u0000\u02f9\u008f\u0001\u0000\u0000\u0000\u02fa\u02fb"+ - "\u0005!\u0000\u0000\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0091\u0001\u0000"+ - "\u0000\u0000\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u0093\u0001\u0000\u0000"+ - "\u0000\u02ff\u0300\u0005<\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301"+ - "\u0095\u0001\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0097"+ - "\u0001\u0000\u0000\u0000\u0304\u0305\u0005>\u0000\u0000\u0305\u0306\u0005"+ - "=\u0000\u0000\u0306\u0099\u0001\u0000\u0000\u0000\u0307\u0308\u0005+\u0000"+ - "\u0000\u0308\u009b\u0001\u0000\u0000\u0000\u0309\u030a\u0005-\u0000\u0000"+ - "\u030a\u009d\u0001\u0000\u0000\u0000\u030b\u030c\u0005*\u0000\u0000\u030c"+ - "\u009f\u0001\u0000\u0000\u0000\u030d\u030e\u0005/\u0000\u0000\u030e\u00a1"+ - "\u0001\u0000\u0000\u0000\u030f\u0310\u0005%\u0000\u0000\u0310\u00a3\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0005[\u0000\u0000\u0312\u0313\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0006L\u0000\u0000\u0314\u0315\u0006L\u0000\u0000"+ - "\u0315\u00a5\u0001\u0000\u0000\u0000\u0316\u0317\u0005]\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0319\u0006M\r\u0000\u0319\u031a"+ - "\u0006M\r\u0000\u031a\u00a7\u0001\u0000\u0000\u0000\u031b\u031f\u0003"+ - "F\u001d\u0000\u031c\u031e\u0003V%\u0000\u031d\u031c\u0001\u0000\u0000"+ - "\u0000\u031e\u0321\u0001\u0000\u0000\u0000\u031f\u031d\u0001\u0000\u0000"+ - "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u032c\u0001\u0000\u0000"+ - "\u0000\u0321\u031f\u0001\u0000\u0000\u0000\u0322\u0325\u0003T$\u0000\u0323"+ - "\u0325\u0003N!\u0000\u0324\u0322\u0001\u0000\u0000\u0000\u0324\u0323\u0001"+ - "\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000\u0000\u0326\u0328\u0003"+ - 
"V%\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000"+ - "\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0001\u0000\u0000\u0000\u032b\u031b\u0001\u0000\u0000"+ - "\u0000\u032b\u0324\u0001\u0000\u0000\u0000\u032c\u00a9\u0001\u0000\u0000"+ - "\u0000\u032d\u032f\u0003P\"\u0000\u032e\u0330\u0003R#\u0000\u032f\u032e"+ - "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ - "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u0333"+ - "\u0001\u0000\u0000\u0000\u0333\u0334\u0003P\"\u0000\u0334\u00ab\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0003\u00aaO\u0000\u0336\u00ad\u0001\u0000"+ - "\u0000\u0000\u0337\u0338\u00032\u0013\u0000\u0338\u0339\u0001\u0000\u0000"+ - "\u0000\u0339\u033a\u0006Q\t\u0000\u033a\u00af\u0001\u0000\u0000\u0000"+ - "\u033b\u033c\u00034\u0014\u0000\u033c\u033d\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0006R\t\u0000\u033e\u00b1\u0001\u0000\u0000\u0000\u033f\u0340"+ - "\u00036\u0015\u0000\u0340\u0341\u0001\u0000\u0000\u0000\u0341\u0342\u0006"+ - "S\t\u0000\u0342\u00b3\u0001\u0000\u0000\u0000\u0343\u0344\u0003B\u001b"+ - "\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006T\f\u0000"+ - "\u0346\u0347\u0006T\r\u0000\u0347\u00b5\u0001\u0000\u0000\u0000\u0348"+ - "\u0349\u0003\u00a4L\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b"+ - "\u0006U\n\u0000\u034b\u00b7\u0001\u0000\u0000\u0000\u034c\u034d\u0003"+ - "\u00a6M\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034f\u0006V\u000e"+ - "\u0000\u034f\u00b9\u0001\u0000\u0000\u0000\u0350\u0351\u0003h.\u0000\u0351"+ - "\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0006W\u000f\u0000\u0353\u00bb"+ - "\u0001\u0000\u0000\u0000\u0354\u0355\u0003d,\u0000\u0355\u0356\u0001\u0000"+ - "\u0000\u0000\u0356\u0357\u0006X\u0010\u0000\u0357\u00bd\u0001\u0000\u0000"+ - "\u0000\u0358\u0359\u0003X&\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006Y\u0011\u0000\u035b\u00bf\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u0005o\u0000\u0000\u035d\u035e\u0005p\u0000\u0000\u035e\u035f\u0005t"+ - "\u0000\u0000\u035f\u0360\u0005i\u0000\u0000\u0360\u0361\u0005o\u0000\u0000"+ - "\u0361\u0362\u0005n\u0000\u0000\u0362\u0363\u0005s\u0000\u0000\u0363\u00c1"+ - "\u0001\u0000\u0000\u0000\u0364\u0365\u0005m\u0000\u0000\u0365\u0366\u0005"+ - "e\u0000\u0000\u0366\u0367\u0005t\u0000\u0000\u0367\u0368\u0005a\u0000"+ - "\u0000\u0368\u0369\u0005d\u0000\u0000\u0369\u036a\u0005a\u0000\u0000\u036a"+ - "\u036b\u0005t\u0000\u0000\u036b\u036c\u0005a\u0000\u0000\u036c\u00c3\u0001"+ - "\u0000\u0000\u0000\u036d\u0371\b\n\u0000\u0000\u036e\u036f\u0005/\u0000"+ - "\u0000\u036f\u0371\b\u000b\u0000\u0000\u0370\u036d\u0001\u0000\u0000\u0000"+ - "\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u00c5\u0001\u0000\u0000\u0000"+ - "\u0372\u0374\u0003\u00c4\\\u0000\u0373\u0372\u0001\u0000\u0000\u0000\u0374"+ - "\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375"+ - "\u0376\u0001\u0000\u0000\u0000\u0376\u00c7\u0001\u0000\u0000\u0000\u0377"+ - "\u0378\u0003\u00acP\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a"+ - "\u0006^\u0012\u0000\u037a\u00c9\u0001\u0000\u0000\u0000\u037b\u037c\u0003"+ - "2\u0013\u0000\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006_\t"+ - "\u0000\u037e\u00cb\u0001\u0000\u0000\u0000\u037f\u0380\u00034\u0014\u0000"+ - "\u0380\u0381\u0001\u0000\u0000\u0000\u0381\u0382\u0006`\t\u0000\u0382"+ - "\u00cd\u0001\u0000\u0000\u0000\u0383\u0384\u00036\u0015\u0000\u0384\u0385"+ - 
"\u0001\u0000\u0000\u0000\u0385\u0386\u0006a\t\u0000\u0386\u00cf\u0001"+ - "\u0000\u0000\u0000\u0387\u0388\u0003B\u001b\u0000\u0388\u0389\u0001\u0000"+ - "\u0000\u0000\u0389\u038a\u0006b\f\u0000\u038a\u038b\u0006b\r\u0000\u038b"+ - "\u00d1\u0001\u0000\u0000\u0000\u038c\u038d\u0003l0\u0000\u038d\u038e\u0001"+ - "\u0000\u0000\u0000\u038e\u038f\u0006c\u0013\u0000\u038f\u00d3\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0003h.\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ - "\u0392\u0393\u0006d\u000f\u0000\u0393\u00d5\u0001\u0000\u0000\u0000\u0394"+ - "\u0399\u0003F\u001d\u0000\u0395\u0399\u0003D\u001c\u0000\u0396\u0399\u0003"+ - "T$\u0000\u0397\u0399\u0003\u009eI\u0000\u0398\u0394\u0001\u0000\u0000"+ - "\u0000\u0398\u0395\u0001\u0000\u0000\u0000\u0398\u0396\u0001\u0000\u0000"+ - "\u0000\u0398\u0397\u0001\u0000\u0000\u0000\u0399\u00d7\u0001\u0000\u0000"+ - "\u0000\u039a\u039d\u0003F\u001d\u0000\u039b\u039d\u0003\u009eI\u0000\u039c"+ - "\u039a\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d"+ - "\u03a1\u0001\u0000\u0000\u0000\u039e\u03a0\u0003\u00d6e\u0000\u039f\u039e"+ - "\u0001\u0000\u0000\u0000\u03a0\u03a3\u0001\u0000\u0000\u0000\u03a1\u039f"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03ae"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a1\u0001\u0000\u0000\u0000\u03a4\u03a7"+ - "\u0003T$\u0000\u03a5\u03a7\u0003N!\u0000\u03a6\u03a4\u0001\u0000\u0000"+ - "\u0000\u03a6\u03a5\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000"+ - "\u0000\u03a8\u03aa\u0003\u00d6e\u0000\u03a9\u03a8\u0001\u0000\u0000\u0000"+ - "\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000"+ - "\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ae\u0001\u0000\u0000\u0000"+ - "\u03ad\u039c\u0001\u0000\u0000\u0000\u03ad\u03a6\u0001\u0000\u0000\u0000"+ - "\u03ae\u00d9\u0001\u0000\u0000\u0000\u03af\u03b2\u0003\u00d8f\u0000\u03b0"+ - "\u03b2\u0003\u00aaO\u0000\u03b1\u03af\u0001\u0000\u0000\u0000\u03b1\u03b0"+ - "\u0001\u0000\u0000\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b1"+ - "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u00db"+ - "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00032\u0013\u0000\u03b6\u03b7\u0001"+ - "\u0000\u0000\u0000\u03b7\u03b8\u0006h\t\u0000\u03b8\u00dd\u0001\u0000"+ - "\u0000\u0000\u03b9\u03ba\u00034\u0014\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - "\u0000\u03bb\u03bc\u0006i\t\u0000\u03bc\u00df\u0001\u0000\u0000\u0000"+ - "\u03bd\u03be\u00036\u0015\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf"+ - "\u03c0\u0006j\t\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2"+ - "\u0003B\u001b\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006"+ - "k\f\u0000\u03c4\u03c5\u0006k\r\u0000\u03c5\u00e3\u0001\u0000\u0000\u0000"+ - "\u03c6\u03c7\u0003d,\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9"+ - "\u0006l\u0010\u0000\u03c9\u00e5\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003"+ - "h.\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006m\u000f"+ - "\u0000\u03cd\u00e7\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003l0\u0000\u03cf"+ - "\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006n\u0013\u0000\u03d1\u00e9"+ - "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0005a\u0000\u0000\u03d3\u03d4\u0005"+ - "s\u0000\u0000\u03d4\u00eb\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00da"+ - "g\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006p\u0014\u0000"+ - "\u03d8\u00ed\u0001\u0000\u0000\u0000\u03d9\u03da\u00032\u0013\u0000\u03da"+ - "\u03db\u0001\u0000\u0000\u0000\u03db\u03dc\u0006q\t\u0000\u03dc\u00ef"+ - 
"\u0001\u0000\u0000\u0000\u03dd\u03de\u00034\u0014\u0000\u03de\u03df\u0001"+ - "\u0000\u0000\u0000\u03df\u03e0\u0006r\t\u0000\u03e0\u00f1\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e2\u00036\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - "\u0000\u03e3\u03e4\u0006s\t\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000"+ - "\u03e5\u03e6\u0003B\u001b\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7"+ - "\u03e8\u0006t\f\u0000\u03e8\u03e9\u0006t\r\u0000\u03e9\u00f5\u0001\u0000"+ - "\u0000\u0000\u03ea\u03eb\u0003\u00a4L\u0000\u03eb\u03ec\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0006u\n\u0000\u03ed\u03ee\u0006u\u0015\u0000\u03ee"+ - "\u00f7\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005o\u0000\u0000\u03f0\u03f1"+ - "\u0005n\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006"+ - "v\u0016\u0000\u03f3\u00f9\u0001\u0000\u0000\u0000\u03f4\u03f5\u0005w\u0000"+ - "\u0000\u03f5\u03f6\u0005i\u0000\u0000\u03f6\u03f7\u0005t\u0000\u0000\u03f7"+ - "\u03f8\u0005h\u0000\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u0006w\u0016\u0000\u03fa\u00fb\u0001\u0000\u0000\u0000\u03fb\u03fc\b"+ - "\f\u0000\u0000\u03fc\u00fd\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003\u00fc"+ - "x\u0000\u03fe\u03fd\u0001\u0000\u0000\u0000\u03ff\u0400\u0001\u0000\u0000"+ - "\u0000\u0400\u03fe\u0001\u0000\u0000\u0000\u0400\u0401\u0001\u0000\u0000"+ - "\u0000\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0003\u0142\u009b"+ - "\u0000\u0403\u0405\u0001\u0000\u0000\u0000\u0404\u03fe\u0001\u0000\u0000"+ - "\u0000\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0407\u0001\u0000\u0000"+ - "\u0000\u0406\u0408\u0003\u00fcx\u0000\u0407\u0406\u0001\u0000\u0000\u0000"+ - "\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u0407\u0001\u0000\u0000\u0000"+ - "\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u00ff\u0001\u0000\u0000\u0000"+ - "\u040b\u040c\u0003\u00acP\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d"+ - "\u040e\u0006z\u0012\u0000\u040e\u0101\u0001\u0000\u0000\u0000\u040f\u0410"+ - "\u0003\u00fey\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ - "{\u0017\u0000\u0412\u0103\u0001\u0000\u0000\u0000\u0413\u0414\u00032\u0013"+ - "\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006|\t\u0000"+ - "\u0416\u0105\u0001\u0000\u0000\u0000\u0417\u0418\u00034\u0014\u0000\u0418"+ - "\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006}\t\u0000\u041a\u0107"+ - "\u0001\u0000\u0000\u0000\u041b\u041c\u00036\u0015\u0000\u041c\u041d\u0001"+ - "\u0000\u0000\u0000\u041d\u041e\u0006~\t\u0000\u041e\u0109\u0001\u0000"+ - "\u0000\u0000\u041f\u0420\u0003B\u001b\u0000\u0420\u0421\u0001\u0000\u0000"+ - "\u0000\u0421\u0422\u0006\u007f\f\u0000\u0422\u0423\u0006\u007f\r\u0000"+ - "\u0423\u0424\u0006\u007f\r\u0000\u0424\u010b\u0001\u0000\u0000\u0000\u0425"+ - "\u0426\u0003d,\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006"+ - "\u0080\u0010\u0000\u0428\u010d\u0001\u0000\u0000\u0000\u0429\u042a\u0003"+ - "h.\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006\u0081\u000f"+ - "\u0000\u042c\u010f\u0001\u0000\u0000\u0000\u042d\u042e\u0003l0\u0000\u042e"+ - "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006\u0082\u0013\u0000\u0430"+ - "\u0111\u0001\u0000\u0000\u0000\u0431\u0432\u0003\u00faw\u0000\u0432\u0433"+ - "\u0001\u0000\u0000\u0000\u0433\u0434\u0006\u0083\u0018\u0000\u0434\u0113"+ - "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00dag\u0000\u0436\u0437\u0001"+ - "\u0000\u0000\u0000\u0437\u0438\u0006\u0084\u0014\u0000\u0438\u0115\u0001"+ - "\u0000\u0000\u0000\u0439\u043a\u0003\u00acP\u0000\u043a\u043b\u0001\u0000"+ - 
"\u0000\u0000\u043b\u043c\u0006\u0085\u0012\u0000\u043c\u0117\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00032\u0013\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006\u0086\t\u0000\u0440\u0119\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u00034\u0014\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006\u0087\t\u0000\u0444\u011b\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u00036\u0015\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "\u0088\t\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003B"+ - "\u001b\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089"+ - "\f\u0000\u044c\u044d\u0006\u0089\r\u0000\u044d\u011f\u0001\u0000\u0000"+ - "\u0000\u044e\u044f\u0003l0\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450"+ - "\u0451\u0006\u008a\u0013\u0000\u0451\u0121\u0001\u0000\u0000\u0000\u0452"+ - "\u0453\u0003\u00acP\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455"+ - "\u0006\u008b\u0012\u0000\u0455\u0123\u0001\u0000\u0000\u0000\u0456\u0457"+ - "\u0003\u00a8N\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006"+ - "\u008c\u0019\u0000\u0459\u0125\u0001\u0000\u0000\u0000\u045a\u045b\u0003"+ - "2\u0013\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006\u008d"+ - "\t\u0000\u045d\u0127\u0001\u0000\u0000\u0000\u045e\u045f\u00034\u0014"+ - "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006\u008e\t\u0000"+ - "\u0461\u0129\u0001\u0000\u0000\u0000\u0462\u0463\u00036\u0015\u0000\u0463"+ - "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006\u008f\t\u0000\u0465\u012b"+ - "\u0001\u0000\u0000\u0000\u0466\u0467\u0003B\u001b\u0000\u0467\u0468\u0001"+ - "\u0000\u0000\u0000\u0468\u0469\u0006\u0090\f\u0000\u0469\u046a\u0006\u0090"+ - "\r\u0000\u046a\u012d\u0001\u0000\u0000\u0000\u046b\u046c\u0005i\u0000"+ - "\u0000\u046c\u046d\u0005n\u0000\u0000\u046d\u046e\u0005f\u0000\u0000\u046e"+ - "\u046f\u0005o\u0000\u0000\u046f\u012f\u0001\u0000\u0000\u0000\u0470\u0471"+ - "\u00032\u0013\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006"+ - "\u0092\t\u0000\u0473\u0131\u0001\u0000\u0000\u0000\u0474\u0475\u00034"+ - "\u0014\u0000\u0475\u0476\u0001\u0000\u0000\u0000\u0476\u0477\u0006\u0093"+ - "\t\u0000\u0477\u0133\u0001\u0000\u0000\u0000\u0478\u0479\u00036\u0015"+ - "\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0094\t\u0000"+ - "\u047b\u0135\u0001\u0000\u0000\u0000\u047c\u047d\u0003B\u001b\u0000\u047d"+ - "\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0095\f\u0000\u047f\u0480"+ - "\u0006\u0095\r\u0000\u0480\u0137\u0001\u0000\u0000\u0000\u0481\u0482\u0005"+ - "f\u0000\u0000\u0482\u0483\u0005u\u0000\u0000\u0483\u0484\u0005n\u0000"+ - "\u0000\u0484\u0485\u0005c\u0000\u0000\u0485\u0486\u0005t\u0000\u0000\u0486"+ - "\u0487\u0005i\u0000\u0000\u0487\u0488\u0005o\u0000\u0000\u0488\u0489\u0005"+ - "n\u0000\u0000\u0489\u048a\u0005s\u0000\u0000\u048a\u0139\u0001\u0000\u0000"+ - "\u0000\u048b\u048c\u00032\u0013\u0000\u048c\u048d\u0001\u0000\u0000\u0000"+ - "\u048d\u048e\u0006\u0097\t\u0000\u048e\u013b\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u00034\u0014\u0000\u0490\u0491\u0001\u0000\u0000\u0000\u0491\u0492"+ - "\u0006\u0098\t\u0000\u0492\u013d\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ - "6\u0015\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006\u0099"+ - "\t\u0000\u0496\u013f\u0001\u0000\u0000\u0000\u0497\u0498\u0003\u00a6M"+ - "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u009a\u000e"+ - "\u0000\u049a\u049b\u0006\u009a\r\u0000\u049b\u0141\u0001\u0000\u0000\u0000"+ - 
"\u049c\u049d\u0005:\u0000\u0000\u049d\u0143\u0001\u0000\u0000\u0000\u049e"+ - "\u04a4\u0003N!\u0000\u049f\u04a4\u0003D\u001c\u0000\u04a0\u04a4\u0003"+ - "l0\u0000\u04a1\u04a4\u0003F\u001d\u0000\u04a2\u04a4\u0003T$\u0000\u04a3"+ - "\u049e\u0001\u0000\u0000\u0000\u04a3\u049f\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a0\u0001\u0000\u0000\u0000\u04a3\u04a1\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ - "\u04a3\u0001\u0000\u0000\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6"+ - "\u0145\u0001\u0000\u0000\u0000\u04a7\u04a8\u00032\u0013\u0000\u04a8\u04a9"+ - "\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006\u009d\t\u0000\u04aa\u0147\u0001"+ - "\u0000\u0000\u0000\u04ab\u04ac\u00034\u0014\u0000\u04ac\u04ad\u0001\u0000"+ - "\u0000\u0000\u04ad\u04ae\u0006\u009e\t\u0000\u04ae\u0149\u0001\u0000\u0000"+ - "\u0000\u04af\u04b0\u00036\u0015\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000"+ - "\u04b1\u04b2\u0006\u009f\t\u0000\u04b2\u014b\u0001\u0000\u0000\u0000:"+ - "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e5\u01ef"+ - "\u01f3\u01f6\u01ff\u0201\u020c\u0235\u023a\u0243\u024a\u024f\u0251\u025c"+ - "\u0264\u0267\u0269\u026e\u0273\u0279\u0280\u0285\u028b\u028e\u0296\u029a"+ - "\u031f\u0324\u0329\u032b\u0331\u0370\u0375\u0398\u039c\u03a1\u03a6\u03ab"+ - "\u03ad\u03b1\u03b3\u0400\u0404\u0409\u04a3\u04a5\u001a\u0005\u0002\u0000"+ - "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ - "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ - "\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ - "\u0007B\u0000\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007D\u0000"+ - "\u0007%\u0000\u0007N\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007X"+ - "\u0000\u0007W\u0000\u0007C\u0000"; + "\u0000\n\u0140\u0001\u0000\u0000\u0000\n\u0142\u0001\u0000\u0000\u0000"+ + "\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000\u0000"+ + "\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000\u0000"+ + "\u000b\u014c\u0001\u0000\u0000\u0000\u000b\u014e\u0001\u0000\u0000\u0000"+ + "\f\u0150\u0001\u0000\u0000\u0000\f\u0152\u0001\u0000\u0000\u0000\f\u0154"+ + "\u0001\u0000\u0000\u0000\f\u0156\u0001\u0000\u0000\u0000\f\u0158\u0001"+ + "\u0000\u0000\u0000\r\u015a\u0001\u0000\u0000\u0000\r\u015c\u0001\u0000"+ + "\u0000\u0000\r\u015e\u0001\u0000\u0000\u0000\r\u0160\u0001\u0000\u0000"+ + "\u0000\r\u0162\u0001\u0000\u0000\u0000\r\u0164\u0001\u0000\u0000\u0000"+ + "\r\u0166\u0001\u0000\u0000\u0000\r\u0168\u0001\u0000\u0000\u0000\u000e"+ + "\u016a\u0001\u0000\u0000\u0000\u0010\u0174\u0001\u0000\u0000\u0000\u0012"+ + "\u017b\u0001\u0000\u0000\u0000\u0014\u0184\u0001\u0000\u0000\u0000\u0016"+ + "\u018b\u0001\u0000\u0000\u0000\u0018\u0195\u0001\u0000\u0000\u0000\u001a"+ + "\u019c\u0001\u0000\u0000\u0000\u001c\u01a3\u0001\u0000\u0000\u0000\u001e"+ + "\u01b1\u0001\u0000\u0000\u0000 \u01b8\u0001\u0000\u0000\u0000\"\u01c0"+ + "\u0001\u0000\u0000\u0000$\u01c7\u0001\u0000\u0000\u0000&\u01d1\u0001\u0000"+ + "\u0000\u0000(\u01dd\u0001\u0000\u0000\u0000*\u01e6\u0001\u0000\u0000\u0000"+ + ",\u01ec\u0001\u0000\u0000\u0000.\u01f3\u0001\u0000\u0000\u00000\u01fa"+ + "\u0001\u0000\u0000\u00002\u0202\u0001\u0000\u0000\u00004\u020b\u0001\u0000"+ + "\u0000\u00006\u0211\u0001\u0000\u0000\u00008\u0222\u0001\u0000\u0000\u0000"+ + ":\u0232\u0001\u0000\u0000\u0000<\u023b\u0001\u0000\u0000\u0000>\u023e"+ + "\u0001\u0000\u0000\u0000@\u0242\u0001\u0000\u0000\u0000B\u0247\u0001\u0000"+ + 
"\u0000\u0000D\u024c\u0001\u0000\u0000\u0000F\u0250\u0001\u0000\u0000\u0000"+ + "H\u0254\u0001\u0000\u0000\u0000J\u0258\u0001\u0000\u0000\u0000L\u025c"+ + "\u0001\u0000\u0000\u0000N\u025e\u0001\u0000\u0000\u0000P\u0260\u0001\u0000"+ + "\u0000\u0000R\u0263\u0001\u0000\u0000\u0000T\u0265\u0001\u0000\u0000\u0000"+ + "V\u026e\u0001\u0000\u0000\u0000X\u0270\u0001\u0000\u0000\u0000Z\u0275"+ + "\u0001\u0000\u0000\u0000\\\u0277\u0001\u0000\u0000\u0000^\u027c\u0001"+ + "\u0000\u0000\u0000`\u029b\u0001\u0000\u0000\u0000b\u029e\u0001\u0000\u0000"+ + "\u0000d\u02cc\u0001\u0000\u0000\u0000f\u02ce\u0001\u0000\u0000\u0000h"+ + "\u02d1\u0001\u0000\u0000\u0000j\u02d5\u0001\u0000\u0000\u0000l\u02d9\u0001"+ + "\u0000\u0000\u0000n\u02db\u0001\u0000\u0000\u0000p\u02de\u0001\u0000\u0000"+ + "\u0000r\u02e0\u0001\u0000\u0000\u0000t\u02e5\u0001\u0000\u0000\u0000v"+ + "\u02e7\u0001\u0000\u0000\u0000x\u02ed\u0001\u0000\u0000\u0000z\u02f3\u0001"+ + "\u0000\u0000\u0000|\u02f8\u0001\u0000\u0000\u0000~\u02fa\u0001\u0000\u0000"+ + "\u0000\u0080\u02fd\u0001\u0000\u0000\u0000\u0082\u0300\u0001\u0000\u0000"+ + "\u0000\u0084\u0305\u0001\u0000\u0000\u0000\u0086\u0309\u0001\u0000\u0000"+ + "\u0000\u0088\u030e\u0001\u0000\u0000\u0000\u008a\u0314\u0001\u0000\u0000"+ + "\u0000\u008c\u0317\u0001\u0000\u0000\u0000\u008e\u0319\u0001\u0000\u0000"+ + "\u0000\u0090\u031f\u0001\u0000\u0000\u0000\u0092\u0321\u0001\u0000\u0000"+ + "\u0000\u0094\u0326\u0001\u0000\u0000\u0000\u0096\u0329\u0001\u0000\u0000"+ + "\u0000\u0098\u032c\u0001\u0000\u0000\u0000\u009a\u032f\u0001\u0000\u0000"+ + "\u0000\u009c\u0331\u0001\u0000\u0000\u0000\u009e\u0334\u0001\u0000\u0000"+ + "\u0000\u00a0\u0336\u0001\u0000\u0000\u0000\u00a2\u0339\u0001\u0000\u0000"+ + "\u0000\u00a4\u033b\u0001\u0000\u0000\u0000\u00a6\u033d\u0001\u0000\u0000"+ + "\u0000\u00a8\u033f\u0001\u0000\u0000\u0000\u00aa\u0341\u0001\u0000\u0000"+ + "\u0000\u00ac\u0343\u0001\u0000\u0000\u0000\u00ae\u0348\u0001\u0000\u0000"+ + "\u0000\u00b0\u035d\u0001\u0000\u0000\u0000\u00b2\u035f\u0001\u0000\u0000"+ + "\u0000\u00b4\u0367\u0001\u0000\u0000\u0000\u00b6\u0369\u0001\u0000\u0000"+ + "\u0000\u00b8\u036d\u0001\u0000\u0000\u0000\u00ba\u0371\u0001\u0000\u0000"+ + "\u0000\u00bc\u0375\u0001\u0000\u0000\u0000\u00be\u037a\u0001\u0000\u0000"+ + "\u0000\u00c0\u037e\u0001\u0000\u0000\u0000\u00c2\u0382\u0001\u0000\u0000"+ + "\u0000\u00c4\u0386\u0001\u0000\u0000\u0000\u00c6\u038a\u0001\u0000\u0000"+ + "\u0000\u00c8\u038e\u0001\u0000\u0000\u0000\u00ca\u0396\u0001\u0000\u0000"+ + "\u0000\u00cc\u039f\u0001\u0000\u0000\u0000\u00ce\u03a3\u0001\u0000\u0000"+ + "\u0000\u00d0\u03a7\u0001\u0000\u0000\u0000\u00d2\u03ab\u0001\u0000\u0000"+ + "\u0000\u00d4\u03af\u0001\u0000\u0000\u0000\u00d6\u03b4\u0001\u0000\u0000"+ + "\u0000\u00d8\u03b8\u0001\u0000\u0000\u0000\u00da\u03c0\u0001\u0000\u0000"+ + "\u0000\u00dc\u03d5\u0001\u0000\u0000\u0000\u00de\u03d9\u0001\u0000\u0000"+ + "\u0000\u00e0\u03dd\u0001\u0000\u0000\u0000\u00e2\u03e1\u0001\u0000\u0000"+ + "\u0000\u00e4\u03e5\u0001\u0000\u0000\u0000\u00e6\u03e9\u0001\u0000\u0000"+ + "\u0000\u00e8\u03ee\u0001\u0000\u0000\u0000\u00ea\u03f2\u0001\u0000\u0000"+ + "\u0000\u00ec\u03f6\u0001\u0000\u0000\u0000\u00ee\u03fa\u0001\u0000\u0000"+ + "\u0000\u00f0\u03fd\u0001\u0000\u0000\u0000\u00f2\u0401\u0001\u0000\u0000"+ + "\u0000\u00f4\u0405\u0001\u0000\u0000\u0000\u00f6\u0409\u0001\u0000\u0000"+ + "\u0000\u00f8\u040d\u0001\u0000\u0000\u0000\u00fa\u0412\u0001\u0000\u0000"+ + "\u0000\u00fc\u0417\u0001\u0000\u0000\u0000\u00fe\u041c\u0001\u0000\u0000"+ + 
"\u0000\u0100\u0423\u0001\u0000\u0000\u0000\u0102\u042c\u0001\u0000\u0000"+ + "\u0000\u0104\u0433\u0001\u0000\u0000\u0000\u0106\u0437\u0001\u0000\u0000"+ + "\u0000\u0108\u043b\u0001\u0000\u0000\u0000\u010a\u043f\u0001\u0000\u0000"+ + "\u0000\u010c\u0443\u0001\u0000\u0000\u0000\u010e\u0447\u0001\u0000\u0000"+ + "\u0000\u0110\u044d\u0001\u0000\u0000\u0000\u0112\u0451\u0001\u0000\u0000"+ + "\u0000\u0114\u0455\u0001\u0000\u0000\u0000\u0116\u0459\u0001\u0000\u0000"+ + "\u0000\u0118\u045d\u0001\u0000\u0000\u0000\u011a\u0461\u0001\u0000\u0000"+ + "\u0000\u011c\u0465\u0001\u0000\u0000\u0000\u011e\u0469\u0001\u0000\u0000"+ + "\u0000\u0120\u046d\u0001\u0000\u0000\u0000\u0122\u0471\u0001\u0000\u0000"+ + "\u0000\u0124\u0476\u0001\u0000\u0000\u0000\u0126\u047a\u0001\u0000\u0000"+ + "\u0000\u0128\u047e\u0001\u0000\u0000\u0000\u012a\u0482\u0001\u0000\u0000"+ + "\u0000\u012c\u0486\u0001\u0000\u0000\u0000\u012e\u048a\u0001\u0000\u0000"+ + "\u0000\u0130\u048e\u0001\u0000\u0000\u0000\u0132\u0493\u0001\u0000\u0000"+ + "\u0000\u0134\u0498\u0001\u0000\u0000\u0000\u0136\u049c\u0001\u0000\u0000"+ + "\u0000\u0138\u04a0\u0001\u0000\u0000\u0000\u013a\u04a4\u0001\u0000\u0000"+ + "\u0000\u013c\u04a9\u0001\u0000\u0000\u0000\u013e\u04b3\u0001\u0000\u0000"+ + "\u0000\u0140\u04b7\u0001\u0000\u0000\u0000\u0142\u04bb\u0001\u0000\u0000"+ + "\u0000\u0144\u04bf\u0001\u0000\u0000\u0000\u0146\u04c4\u0001\u0000\u0000"+ + "\u0000\u0148\u04cb\u0001\u0000\u0000\u0000\u014a\u04cf\u0001\u0000\u0000"+ + "\u0000\u014c\u04d3\u0001\u0000\u0000\u0000\u014e\u04d7\u0001\u0000\u0000"+ + "\u0000\u0150\u04db\u0001\u0000\u0000\u0000\u0152\u04e0\u0001\u0000\u0000"+ + "\u0000\u0154\u04e6\u0001\u0000\u0000\u0000\u0156\u04ea\u0001\u0000\u0000"+ + "\u0000\u0158\u04ee\u0001\u0000\u0000\u0000\u015a\u04f2\u0001\u0000\u0000"+ + "\u0000\u015c\u04f8\u0001\u0000\u0000\u0000\u015e\u04fc\u0001\u0000\u0000"+ + "\u0000\u0160\u0500\u0001\u0000\u0000\u0000\u0162\u0504\u0001\u0000\u0000"+ + "\u0000\u0164\u050a\u0001\u0000\u0000\u0000\u0166\u0510\u0001\u0000\u0000"+ + "\u0000\u0168\u0516\u0001\u0000\u0000\u0000\u016a\u016b\u0005d\u0000\u0000"+ + "\u016b\u016c\u0005i\u0000\u0000\u016c\u016d\u0005s\u0000\u0000\u016d\u016e"+ + "\u0005s\u0000\u0000\u016e\u016f\u0005e\u0000\u0000\u016f\u0170\u0005c"+ + "\u0000\u0000\u0170\u0171\u0005t\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ + "\u0000\u0172\u0173\u0006\u0000\u0000\u0000\u0173\u000f\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0005d\u0000\u0000\u0175\u0176\u0005r\u0000\u0000\u0176"+ + "\u0177\u0005o\u0000\u0000\u0177\u0178\u0005p\u0000\u0000\u0178\u0179\u0001"+ + "\u0000\u0000\u0000\u0179\u017a\u0006\u0001\u0001\u0000\u017a\u0011\u0001"+ + "\u0000\u0000\u0000\u017b\u017c\u0005e\u0000\u0000\u017c\u017d\u0005n\u0000"+ + "\u0000\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005i\u0000\u0000\u017f"+ + "\u0180\u0005c\u0000\u0000\u0180\u0181\u0005h\u0000\u0000\u0181\u0182\u0001"+ + "\u0000\u0000\u0000\u0182\u0183\u0006\u0002\u0002\u0000\u0183\u0013\u0001"+ + "\u0000\u0000\u0000\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005v\u0000"+ + "\u0000\u0186\u0187\u0005a\u0000\u0000\u0187\u0188\u0005l\u0000\u0000\u0188"+ + "\u0189\u0001\u0000\u0000\u0000\u0189\u018a\u0006\u0003\u0000\u0000\u018a"+ + "\u0015\u0001\u0000\u0000\u0000\u018b\u018c\u0005e\u0000\u0000\u018c\u018d"+ + "\u0005x\u0000\u0000\u018d\u018e\u0005p\u0000\u0000\u018e\u018f\u0005l"+ + "\u0000\u0000\u018f\u0190\u0005a\u0000\u0000\u0190\u0191\u0005i\u0000\u0000"+ + "\u0191\u0192\u0005n\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ + 
"\u0194\u0006\u0004\u0003\u0000\u0194\u0017\u0001\u0000\u0000\u0000\u0195"+ + "\u0196\u0005f\u0000\u0000\u0196\u0197\u0005r\u0000\u0000\u0197\u0198\u0005"+ + "o\u0000\u0000\u0198\u0199\u0005m\u0000\u0000\u0199\u019a\u0001\u0000\u0000"+ + "\u0000\u019a\u019b\u0006\u0005\u0004\u0000\u019b\u0019\u0001\u0000\u0000"+ + "\u0000\u019c\u019d\u0005g\u0000\u0000\u019d\u019e\u0005r\u0000\u0000\u019e"+ + "\u019f\u0005o\u0000\u0000\u019f\u01a0\u0005k\u0000\u0000\u01a0\u01a1\u0001"+ + "\u0000\u0000\u0000\u01a1\u01a2\u0006\u0006\u0000\u0000\u01a2\u001b\u0001"+ + "\u0000\u0000\u0000\u01a3\u01a4\u0005i\u0000\u0000\u01a4\u01a5\u0005n\u0000"+ + "\u0000\u01a5\u01a6\u0005l\u0000\u0000\u01a6\u01a7\u0005i\u0000\u0000\u01a7"+ + "\u01a8\u0005n\u0000\u0000\u01a8\u01a9\u0005e\u0000\u0000\u01a9\u01aa\u0005"+ + "s\u0000\u0000\u01aa\u01ab\u0005t\u0000\u0000\u01ab\u01ac\u0005a\u0000"+ + "\u0000\u01ac\u01ad\u0005t\u0000\u0000\u01ad\u01ae\u0005s\u0000\u0000\u01ae"+ + "\u01af\u0001\u0000\u0000\u0000\u01af\u01b0\u0006\u0007\u0000\u0000\u01b0"+ + "\u001d\u0001\u0000\u0000\u0000\u01b1\u01b2\u0005k\u0000\u0000\u01b2\u01b3"+ + "\u0005e\u0000\u0000\u01b3\u01b4\u0005e\u0000\u0000\u01b4\u01b5\u0005p"+ + "\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000\u01b6\u01b7\u0006\b\u0001"+ + "\u0000\u01b7\u001f\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005l\u0000\u0000"+ + "\u01b9\u01ba\u0005i\u0000\u0000\u01ba\u01bb\u0005m\u0000\u0000\u01bb\u01bc"+ + "\u0005i\u0000\u0000\u01bc\u01bd\u0005t\u0000\u0000\u01bd\u01be\u0001\u0000"+ + "\u0000\u0000\u01be\u01bf\u0006\t\u0000\u0000\u01bf!\u0001\u0000\u0000"+ + "\u0000\u01c0\u01c1\u0005m\u0000\u0000\u01c1\u01c2\u0005e\u0000\u0000\u01c2"+ + "\u01c3\u0005t\u0000\u0000\u01c3\u01c4\u0005a\u0000\u0000\u01c4\u01c5\u0001"+ + "\u0000\u0000\u0000\u01c5\u01c6\u0006\n\u0005\u0000\u01c6#\u0001\u0000"+ + "\u0000\u0000\u01c7\u01c8\u0005m\u0000\u0000\u01c8\u01c9\u0005e\u0000\u0000"+ + "\u01c9\u01ca\u0005t\u0000\u0000\u01ca\u01cb\u0005r\u0000\u0000\u01cb\u01cc"+ + "\u0005i\u0000\u0000\u01cc\u01cd\u0005c\u0000\u0000\u01cd\u01ce\u0005s"+ + "\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d0\u0006\u000b"+ + "\u0006\u0000\u01d0%\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005m\u0000\u0000"+ + "\u01d2\u01d3\u0005v\u0000\u0000\u01d3\u01d4\u0005_\u0000\u0000\u01d4\u01d5"+ + "\u0005e\u0000\u0000\u01d5\u01d6\u0005x\u0000\u0000\u01d6\u01d7\u0005p"+ + "\u0000\u0000\u01d7\u01d8\u0005a\u0000\u0000\u01d8\u01d9\u0005n\u0000\u0000"+ + "\u01d9\u01da\u0005d\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db"+ + "\u01dc\u0006\f\u0007\u0000\u01dc\'\u0001\u0000\u0000\u0000\u01dd\u01de"+ + "\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0005n"+ + "\u0000\u0000\u01e0\u01e1\u0005a\u0000\u0000\u01e1\u01e2\u0005m\u0000\u0000"+ + "\u01e2\u01e3\u0005e\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4"+ + "\u01e5\u0006\r\b\u0000\u01e5)\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ + "r\u0000\u0000\u01e7\u01e8\u0005o\u0000\u0000\u01e8\u01e9\u0005w\u0000"+ + "\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01eb\u0006\u000e\u0000"+ + "\u0000\u01eb+\u0001\u0000\u0000\u0000\u01ec\u01ed\u0005s\u0000\u0000\u01ed"+ + "\u01ee\u0005h\u0000\u0000\u01ee\u01ef\u0005o\u0000\u0000\u01ef\u01f0\u0005"+ + "w\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\u000f"+ + "\t\u0000\u01f2-\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005s\u0000\u0000"+ + "\u01f4\u01f5\u0005o\u0000\u0000\u01f5\u01f6\u0005r\u0000\u0000\u01f6\u01f7"+ + "\u0005t\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006"+ + 
"\u0010\u0000\u0000\u01f9/\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005s\u0000"+ + "\u0000\u01fb\u01fc\u0005t\u0000\u0000\u01fc\u01fd\u0005a\u0000\u0000\u01fd"+ + "\u01fe\u0005t\u0000\u0000\u01fe\u01ff\u0005s\u0000\u0000\u01ff\u0200\u0001"+ + "\u0000\u0000\u0000\u0200\u0201\u0006\u0011\u0000\u0000\u02011\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0005w\u0000\u0000\u0203\u0204\u0005h\u0000\u0000"+ + "\u0204\u0205\u0005e\u0000\u0000\u0205\u0206\u0005r\u0000\u0000\u0206\u0207"+ + "\u0005e\u0000\u0000\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0006"+ + "\u0012\u0000\u0000\u02093\u0001\u0000\u0000\u0000\u020a\u020c\b\u0000"+ + "\u0000\u0000\u020b\u020a\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000"+ + "\u0000\u0000\u020d\u020b\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020f\u0210\u0006\u0013"+ + "\u0000\u0000\u02105\u0001\u0000\u0000\u0000\u0211\u0212\u0005/\u0000\u0000"+ + "\u0212\u0213\u0005/\u0000\u0000\u0213\u0217\u0001\u0000\u0000\u0000\u0214"+ + "\u0216\b\u0001\u0000\u0000\u0215\u0214\u0001\u0000\u0000\u0000\u0216\u0219"+ + "\u0001\u0000\u0000\u0000\u0217\u0215\u0001\u0000\u0000\u0000\u0217\u0218"+ + "\u0001\u0000\u0000\u0000\u0218\u021b\u0001\u0000\u0000\u0000\u0219\u0217"+ + "\u0001\u0000\u0000\u0000\u021a\u021c\u0005\r\u0000\u0000\u021b\u021a\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021e\u0001"+ + "\u0000\u0000\u0000\u021d\u021f\u0005\n\u0000\u0000\u021e\u021d\u0001\u0000"+ + "\u0000\u0000\u021e\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0001\u0000"+ + "\u0000\u0000\u0220\u0221\u0006\u0014\n\u0000\u02217\u0001\u0000\u0000"+ + "\u0000\u0222\u0223\u0005/\u0000\u0000\u0223\u0224\u0005*\u0000\u0000\u0224"+ + "\u0229\u0001\u0000\u0000\u0000\u0225\u0228\u00038\u0015\u0000\u0226\u0228"+ + "\t\u0000\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0227\u0226\u0001"+ + "\u0000\u0000\u0000\u0228\u022b\u0001\u0000\u0000\u0000\u0229\u022a\u0001"+ + "\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000\u022a\u022c\u0001"+ + "\u0000\u0000\u0000\u022b\u0229\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "*\u0000\u0000\u022d\u022e\u0005/\u0000\u0000\u022e\u022f\u0001\u0000\u0000"+ + "\u0000\u022f\u0230\u0006\u0015\n\u0000\u02309\u0001\u0000\u0000\u0000"+ + "\u0231\u0233\u0007\u0002\u0000\u0000\u0232\u0231\u0001\u0000\u0000\u0000"+ + "\u0233\u0234\u0001\u0000\u0000\u0000\u0234\u0232\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000\u0000"+ + "\u0236\u0237\u0006\u0016\n\u0000\u0237;\u0001\u0000\u0000\u0000\u0238"+ + "\u023c\b\u0003\u0000\u0000\u0239\u023a\u0005/\u0000\u0000\u023a\u023c"+ + "\b\u0004\u0000\u0000\u023b\u0238\u0001\u0000\u0000\u0000\u023b\u0239\u0001"+ + "\u0000\u0000\u0000\u023c=\u0001\u0000\u0000\u0000\u023d\u023f\u0003<\u0017"+ + "\u0000\u023e\u023d\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000\u0000"+ + "\u0000\u0240\u023e\u0001\u0000\u0000\u0000\u0240\u0241\u0001\u0000\u0000"+ + "\u0000\u0241?\u0001\u0000\u0000\u0000\u0242\u0243\u0003\u00acO\u0000\u0243"+ + "\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0006\u0019\u000b\u0000\u0245"+ + "\u0246\u0006\u0019\f\u0000\u0246A\u0001\u0000\u0000\u0000\u0247\u0248"+ + "\u0003J\u001e\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249\u024a\u0006"+ + "\u001a\r\u0000\u024a\u024b\u0006\u001a\u000e\u0000\u024bC\u0001\u0000"+ + "\u0000\u0000\u024c\u024d\u0003:\u0016\u0000\u024d\u024e\u0001\u0000\u0000"+ + "\u0000\u024e\u024f\u0006\u001b\n\u0000\u024fE\u0001\u0000\u0000\u0000"+ + 
"\u0250\u0251\u00036\u0014\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252"+ + "\u0253\u0006\u001c\n\u0000\u0253G\u0001\u0000\u0000\u0000\u0254\u0255"+ + "\u00038\u0015\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006"+ + "\u001d\n\u0000\u0257I\u0001\u0000\u0000\u0000\u0258\u0259\u0005|\u0000"+ + "\u0000\u0259\u025a\u0001\u0000\u0000\u0000\u025a\u025b\u0006\u001e\u000e"+ + "\u0000\u025bK\u0001\u0000\u0000\u0000\u025c\u025d\u0007\u0005\u0000\u0000"+ + "\u025dM\u0001\u0000\u0000\u0000\u025e\u025f\u0007\u0006\u0000\u0000\u025f"+ + "O\u0001\u0000\u0000\u0000\u0260\u0261\u0005\\\u0000\u0000\u0261\u0262"+ + "\u0007\u0007\u0000\u0000\u0262Q\u0001\u0000\u0000\u0000\u0263\u0264\b"+ + "\b\u0000\u0000\u0264S\u0001\u0000\u0000\u0000\u0265\u0267\u0007\t\u0000"+ + "\u0000\u0266\u0268\u0007\n\u0000\u0000\u0267\u0266\u0001\u0000\u0000\u0000"+ + "\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000\u0000"+ + "\u0269\u026b\u0003L\u001f\u0000\u026a\u0269\u0001\u0000\u0000\u0000\u026b"+ + "\u026c\u0001\u0000\u0000\u0000\u026c\u026a\u0001\u0000\u0000\u0000\u026c"+ + "\u026d\u0001\u0000\u0000\u0000\u026dU\u0001\u0000\u0000\u0000\u026e\u026f"+ + "\u0005@\u0000\u0000\u026fW\u0001\u0000\u0000\u0000\u0270\u0271\u0005`"+ + "\u0000\u0000\u0271Y\u0001\u0000\u0000\u0000\u0272\u0276\b\u000b\u0000"+ + "\u0000\u0273\u0274\u0005`\u0000\u0000\u0274\u0276\u0005`\u0000\u0000\u0275"+ + "\u0272\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0276"+ + "[\u0001\u0000\u0000\u0000\u0277\u0278\u0005_\u0000\u0000\u0278]\u0001"+ + "\u0000\u0000\u0000\u0279\u027d\u0003N \u0000\u027a\u027d\u0003L\u001f"+ + "\u0000\u027b\u027d\u0003\\\'\u0000\u027c\u0279\u0001\u0000\u0000\u0000"+ + "\u027c\u027a\u0001\u0000\u0000\u0000\u027c\u027b\u0001\u0000\u0000\u0000"+ + "\u027d_\u0001\u0000\u0000\u0000\u027e\u0283\u0005\"\u0000\u0000\u027f"+ + "\u0282\u0003P!\u0000\u0280\u0282\u0003R\"\u0000\u0281\u027f\u0001\u0000"+ + "\u0000\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0282\u0285\u0001\u0000"+ + "\u0000\u0000\u0283\u0281\u0001\u0000\u0000\u0000\u0283\u0284\u0001\u0000"+ + "\u0000\u0000\u0284\u0286\u0001\u0000\u0000\u0000\u0285\u0283\u0001\u0000"+ + "\u0000\u0000\u0286\u029c\u0005\"\u0000\u0000\u0287\u0288\u0005\"\u0000"+ + "\u0000\u0288\u0289\u0005\"\u0000\u0000\u0289\u028a\u0005\"\u0000\u0000"+ + "\u028a\u028e\u0001\u0000\u0000\u0000\u028b\u028d\b\u0001\u0000\u0000\u028c"+ + "\u028b\u0001\u0000\u0000\u0000\u028d\u0290\u0001\u0000\u0000\u0000\u028e"+ + "\u028f\u0001\u0000\u0000\u0000\u028e\u028c\u0001\u0000\u0000\u0000\u028f"+ + "\u0291\u0001\u0000\u0000\u0000\u0290\u028e\u0001\u0000\u0000\u0000\u0291"+ + "\u0292\u0005\"\u0000\u0000\u0292\u0293\u0005\"\u0000\u0000\u0293\u0294"+ + "\u0005\"\u0000\u0000\u0294\u0296\u0001\u0000\u0000\u0000\u0295\u0297\u0005"+ + "\"\u0000\u0000\u0296\u0295\u0001\u0000\u0000\u0000\u0296\u0297\u0001\u0000"+ + "\u0000\u0000\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u029a\u0005\"\u0000"+ + "\u0000\u0299\u0298\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000\u0000"+ + "\u0000\u029a\u029c\u0001\u0000\u0000\u0000\u029b\u027e\u0001\u0000\u0000"+ + "\u0000\u029b\u0287\u0001\u0000\u0000\u0000\u029ca\u0001\u0000\u0000\u0000"+ + "\u029d\u029f\u0003L\u001f\u0000\u029e\u029d\u0001\u0000\u0000\u0000\u029f"+ + "\u02a0\u0001\u0000\u0000\u0000\u02a0\u029e\u0001\u0000\u0000\u0000\u02a0"+ + "\u02a1\u0001\u0000\u0000\u0000\u02a1c\u0001\u0000\u0000\u0000\u02a2\u02a4"+ + "\u0003L\u001f\u0000\u02a3\u02a2\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001"+ + 
"\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a5\u02a6\u0001"+ + "\u0000\u0000\u0000\u02a6\u02a7\u0001\u0000\u0000\u0000\u02a7\u02ab\u0003"+ + "t3\u0000\u02a8\u02aa\u0003L\u001f\u0000\u02a9\u02a8\u0001\u0000\u0000"+ + "\u0000\u02aa\u02ad\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000\u0000"+ + "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02cd\u0001\u0000\u0000"+ + "\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0\u0003t3\u0000\u02af"+ + "\u02b1\u0003L\u001f\u0000\u02b0\u02af\u0001\u0000\u0000\u0000\u02b1\u02b2"+ + "\u0001\u0000\u0000\u0000\u02b2\u02b0\u0001\u0000\u0000\u0000\u02b2\u02b3"+ + "\u0001\u0000\u0000\u0000\u02b3\u02cd\u0001\u0000\u0000\u0000\u02b4\u02b6"+ + "\u0003L\u001f\u0000\u02b5\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001"+ + "\u0000\u0000\u0000\u02b7\u02b5\u0001\u0000\u0000\u0000\u02b7\u02b8\u0001"+ + "\u0000\u0000\u0000\u02b8\u02c0\u0001\u0000\u0000\u0000\u02b9\u02bd\u0003"+ + "t3\u0000\u02ba\u02bc\u0003L\u001f\u0000\u02bb\u02ba\u0001\u0000\u0000"+ + "\u0000\u02bc\u02bf\u0001\u0000\u0000\u0000\u02bd\u02bb\u0001\u0000\u0000"+ + "\u0000\u02bd\u02be\u0001\u0000\u0000\u0000\u02be\u02c1\u0001\u0000\u0000"+ + "\u0000\u02bf\u02bd\u0001\u0000\u0000\u0000\u02c0\u02b9\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000\u02c1\u02c2\u0001\u0000\u0000"+ + "\u0000\u02c2\u02c3\u0003T#\u0000\u02c3\u02cd\u0001\u0000\u0000\u0000\u02c4"+ + "\u02c6\u0003t3\u0000\u02c5\u02c7\u0003L\u001f\u0000\u02c6\u02c5\u0001"+ + "\u0000\u0000\u0000\u02c7\u02c8\u0001\u0000\u0000\u0000\u02c8\u02c6\u0001"+ + "\u0000\u0000\u0000\u02c8\u02c9\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cb\u0003T#\u0000\u02cb\u02cd\u0001\u0000\u0000"+ + "\u0000\u02cc\u02a3\u0001\u0000\u0000\u0000\u02cc\u02ae\u0001\u0000\u0000"+ + "\u0000\u02cc\u02b5\u0001\u0000\u0000\u0000\u02cc\u02c4\u0001\u0000\u0000"+ + "\u0000\u02cde\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005b\u0000\u0000\u02cf"+ + "\u02d0\u0005y\u0000\u0000\u02d0g\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005"+ + "a\u0000\u0000\u02d2\u02d3\u0005n\u0000\u0000\u02d3\u02d4\u0005d\u0000"+ + "\u0000\u02d4i\u0001\u0000\u0000\u0000\u02d5\u02d6\u0005a\u0000\u0000\u02d6"+ + "\u02d7\u0005s\u0000\u0000\u02d7\u02d8\u0005c\u0000\u0000\u02d8k\u0001"+ + "\u0000\u0000\u0000\u02d9\u02da\u0005=\u0000\u0000\u02dam\u0001\u0000\u0000"+ + "\u0000\u02db\u02dc\u0005:\u0000\u0000\u02dc\u02dd\u0005:\u0000\u0000\u02dd"+ + "o\u0001\u0000\u0000\u0000\u02de\u02df\u0005,\u0000\u0000\u02dfq\u0001"+ + "\u0000\u0000\u0000\u02e0\u02e1\u0005d\u0000\u0000\u02e1\u02e2\u0005e\u0000"+ + "\u0000\u02e2\u02e3\u0005s\u0000\u0000\u02e3\u02e4\u0005c\u0000\u0000\u02e4"+ + "s\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005.\u0000\u0000\u02e6u\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e8\u0005f\u0000\u0000\u02e8\u02e9\u0005a\u0000"+ + "\u0000\u02e9\u02ea\u0005l\u0000\u0000\u02ea\u02eb\u0005s\u0000\u0000\u02eb"+ + "\u02ec\u0005e\u0000\u0000\u02ecw\u0001\u0000\u0000\u0000\u02ed\u02ee\u0005"+ + "f\u0000\u0000\u02ee\u02ef\u0005i\u0000\u0000\u02ef\u02f0\u0005r\u0000"+ + "\u0000\u02f0\u02f1\u0005s\u0000\u0000\u02f1\u02f2\u0005t\u0000\u0000\u02f2"+ + "y\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005l\u0000\u0000\u02f4\u02f5\u0005"+ + "a\u0000\u0000\u02f5\u02f6\u0005s\u0000\u0000\u02f6\u02f7\u0005t\u0000"+ + "\u0000\u02f7{\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005(\u0000\u0000\u02f9"+ + "}\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005i\u0000\u0000\u02fb\u02fc\u0005"+ + "n\u0000\u0000\u02fc\u007f\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005i\u0000"+ + 
"\u0000\u02fe\u02ff\u0005s\u0000\u0000\u02ff\u0081\u0001\u0000\u0000\u0000"+ + "\u0300\u0301\u0005l\u0000\u0000\u0301\u0302\u0005i\u0000\u0000\u0302\u0303"+ + "\u0005k\u0000\u0000\u0303\u0304\u0005e\u0000\u0000\u0304\u0083\u0001\u0000"+ + "\u0000\u0000\u0305\u0306\u0005n\u0000\u0000\u0306\u0307\u0005o\u0000\u0000"+ + "\u0307\u0308\u0005t\u0000\u0000\u0308\u0085\u0001\u0000\u0000\u0000\u0309"+ + "\u030a\u0005n\u0000\u0000\u030a\u030b\u0005u\u0000\u0000\u030b\u030c\u0005"+ + "l\u0000\u0000\u030c\u030d\u0005l\u0000\u0000\u030d\u0087\u0001\u0000\u0000"+ + "\u0000\u030e\u030f\u0005n\u0000\u0000\u030f\u0310\u0005u\u0000\u0000\u0310"+ + "\u0311\u0005l\u0000\u0000\u0311\u0312\u0005l\u0000\u0000\u0312\u0313\u0005"+ + "s\u0000\u0000\u0313\u0089\u0001\u0000\u0000\u0000\u0314\u0315\u0005o\u0000"+ + "\u0000\u0315\u0316\u0005r\u0000\u0000\u0316\u008b\u0001\u0000\u0000\u0000"+ + "\u0317\u0318\u0005?\u0000\u0000\u0318\u008d\u0001\u0000\u0000\u0000\u0319"+ + "\u031a\u0005r\u0000\u0000\u031a\u031b\u0005l\u0000\u0000\u031b\u031c\u0005"+ + "i\u0000\u0000\u031c\u031d\u0005k\u0000\u0000\u031d\u031e\u0005e\u0000"+ + "\u0000\u031e\u008f\u0001\u0000\u0000\u0000\u031f\u0320\u0005)\u0000\u0000"+ + "\u0320\u0091\u0001\u0000\u0000\u0000\u0321\u0322\u0005t\u0000\u0000\u0322"+ + "\u0323\u0005r\u0000\u0000\u0323\u0324\u0005u\u0000\u0000\u0324\u0325\u0005"+ + "e\u0000\u0000\u0325\u0093\u0001\u0000\u0000\u0000\u0326\u0327\u0005=\u0000"+ + "\u0000\u0327\u0328\u0005=\u0000\u0000\u0328\u0095\u0001\u0000\u0000\u0000"+ + "\u0329\u032a\u0005=\u0000\u0000\u032a\u032b\u0005~\u0000\u0000\u032b\u0097"+ + "\u0001\u0000\u0000\u0000\u032c\u032d\u0005!\u0000\u0000\u032d\u032e\u0005"+ + "=\u0000\u0000\u032e\u0099\u0001\u0000\u0000\u0000\u032f\u0330\u0005<\u0000"+ + "\u0000\u0330\u009b\u0001\u0000\u0000\u0000\u0331\u0332\u0005<\u0000\u0000"+ + "\u0332\u0333\u0005=\u0000\u0000\u0333\u009d\u0001\u0000\u0000\u0000\u0334"+ + "\u0335\u0005>\u0000\u0000\u0335\u009f\u0001\u0000\u0000\u0000\u0336\u0337"+ + "\u0005>\u0000\u0000\u0337\u0338\u0005=\u0000\u0000\u0338\u00a1\u0001\u0000"+ + "\u0000\u0000\u0339\u033a\u0005+\u0000\u0000\u033a\u00a3\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005-\u0000\u0000\u033c\u00a5\u0001\u0000\u0000\u0000"+ + "\u033d\u033e\u0005*\u0000\u0000\u033e\u00a7\u0001\u0000\u0000\u0000\u033f"+ + "\u0340\u0005/\u0000\u0000\u0340\u00a9\u0001\u0000\u0000\u0000\u0341\u0342"+ + "\u0005%\u0000\u0000\u0342\u00ab\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ + "[\u0000\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006O\u0000"+ + "\u0000\u0346\u0347\u0006O\u0000\u0000\u0347\u00ad\u0001\u0000\u0000\u0000"+ + "\u0348\u0349\u0005]\u0000\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a"+ + "\u034b\u0006P\u000e\u0000\u034b\u034c\u0006P\u000e\u0000\u034c\u00af\u0001"+ + "\u0000\u0000\u0000\u034d\u0351\u0003N \u0000\u034e\u0350\u0003^(\u0000"+ + "\u034f\u034e\u0001\u0000\u0000\u0000\u0350\u0353\u0001\u0000\u0000\u0000"+ + "\u0351\u034f\u0001\u0000\u0000\u0000\u0351\u0352\u0001\u0000\u0000\u0000"+ + "\u0352\u035e\u0001\u0000\u0000\u0000\u0353\u0351\u0001\u0000\u0000\u0000"+ + "\u0354\u0357\u0003\\\'\u0000\u0355\u0357\u0003V$\u0000\u0356\u0354\u0001"+ + "\u0000\u0000\u0000\u0356\u0355\u0001\u0000\u0000\u0000\u0357\u0359\u0001"+ + "\u0000\u0000\u0000\u0358\u035a\u0003^(\u0000\u0359\u0358\u0001\u0000\u0000"+ + "\u0000\u035a\u035b\u0001\u0000\u0000\u0000\u035b\u0359\u0001\u0000\u0000"+ + "\u0000\u035b\u035c\u0001\u0000\u0000\u0000\u035c\u035e\u0001\u0000\u0000"+ + "\u0000\u035d\u034d\u0001\u0000\u0000\u0000\u035d\u0356\u0001\u0000\u0000"+ 
+ "\u0000\u035e\u00b1\u0001\u0000\u0000\u0000\u035f\u0361\u0003X%\u0000\u0360"+ + "\u0362\u0003Z&\u0000\u0361\u0360\u0001\u0000\u0000\u0000\u0362\u0363\u0001"+ + "\u0000\u0000\u0000\u0363\u0361\u0001\u0000\u0000\u0000\u0363\u0364\u0001"+ + "\u0000\u0000\u0000\u0364\u0365\u0001\u0000\u0000\u0000\u0365\u0366\u0003"+ + "X%\u0000\u0366\u00b3\u0001\u0000\u0000\u0000\u0367\u0368\u0003\u00b2R"+ + "\u0000\u0368\u00b5\u0001\u0000\u0000\u0000\u0369\u036a\u00036\u0014\u0000"+ + "\u036a\u036b\u0001\u0000\u0000\u0000\u036b\u036c\u0006T\n\u0000\u036c"+ + "\u00b7\u0001\u0000\u0000\u0000\u036d\u036e\u00038\u0015\u0000\u036e\u036f"+ + "\u0001\u0000\u0000\u0000\u036f\u0370\u0006U\n\u0000\u0370\u00b9\u0001"+ + "\u0000\u0000\u0000\u0371\u0372\u0003:\u0016\u0000\u0372\u0373\u0001\u0000"+ + "\u0000\u0000\u0373\u0374\u0006V\n\u0000\u0374\u00bb\u0001\u0000\u0000"+ + "\u0000\u0375\u0376\u0003J\u001e\u0000\u0376\u0377\u0001\u0000\u0000\u0000"+ + "\u0377\u0378\u0006W\r\u0000\u0378\u0379\u0006W\u000e\u0000\u0379\u00bd"+ + "\u0001\u0000\u0000\u0000\u037a\u037b\u0003\u00acO\u0000\u037b\u037c\u0001"+ + "\u0000\u0000\u0000\u037c\u037d\u0006X\u000b\u0000\u037d\u00bf\u0001\u0000"+ + "\u0000\u0000\u037e\u037f\u0003\u00aeP\u0000\u037f\u0380\u0001\u0000\u0000"+ + "\u0000\u0380\u0381\u0006Y\u000f\u0000\u0381\u00c1\u0001\u0000\u0000\u0000"+ + "\u0382\u0383\u0003p1\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u0385"+ + "\u0006Z\u0010\u0000\u0385\u00c3\u0001\u0000\u0000\u0000\u0386\u0387\u0003"+ + "l/\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388\u0389\u0006[\u0011"+ + "\u0000\u0389\u00c5\u0001\u0000\u0000\u0000\u038a\u038b\u0003`)\u0000\u038b"+ + "\u038c\u0001\u0000\u0000\u0000\u038c\u038d\u0006\\\u0012\u0000\u038d\u00c7"+ + "\u0001\u0000\u0000\u0000\u038e\u038f\u0005o\u0000\u0000\u038f\u0390\u0005"+ + "p\u0000\u0000\u0390\u0391\u0005t\u0000\u0000\u0391\u0392\u0005i\u0000"+ + "\u0000\u0392\u0393\u0005o\u0000\u0000\u0393\u0394\u0005n\u0000\u0000\u0394"+ + "\u0395\u0005s\u0000\u0000\u0395\u00c9\u0001\u0000\u0000\u0000\u0396\u0397"+ + "\u0005m\u0000\u0000\u0397\u0398\u0005e\u0000\u0000\u0398\u0399\u0005t"+ + "\u0000\u0000\u0399\u039a\u0005a\u0000\u0000\u039a\u039b\u0005d\u0000\u0000"+ + "\u039b\u039c\u0005a\u0000\u0000\u039c\u039d\u0005t\u0000\u0000\u039d\u039e"+ + "\u0005a\u0000\u0000\u039e\u00cb\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ + ">\u0018\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006_\u0013"+ + "\u0000\u03a2\u00cd\u0001\u0000\u0000\u0000\u03a3\u03a4\u00036\u0014\u0000"+ + "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006`\n\u0000\u03a6"+ + "\u00cf\u0001\u0000\u0000\u0000\u03a7\u03a8\u00038\u0015\u0000\u03a8\u03a9"+ + "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006a\n\u0000\u03aa\u00d1\u0001"+ + "\u0000\u0000\u0000\u03ab\u03ac\u0003:\u0016\u0000\u03ac\u03ad\u0001\u0000"+ + "\u0000\u0000\u03ad\u03ae\u0006b\n\u0000\u03ae\u00d3\u0001\u0000\u0000"+ + "\u0000\u03af\u03b0\u0003J\u001e\u0000\u03b0\u03b1\u0001\u0000\u0000\u0000"+ + "\u03b1\u03b2\u0006c\r\u0000\u03b2\u03b3\u0006c\u000e\u0000\u03b3\u00d5"+ + "\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003t3\u0000\u03b5\u03b6\u0001\u0000"+ + "\u0000\u0000\u03b6\u03b7\u0006d\u0014\u0000\u03b7\u00d7\u0001\u0000\u0000"+ + "\u0000\u03b8\u03b9\u0003p1\u0000\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba"+ + "\u03bb\u0006e\u0010\u0000\u03bb\u00d9\u0001\u0000\u0000\u0000\u03bc\u03c1"+ + "\u0003N \u0000\u03bd\u03c1\u0003L\u001f\u0000\u03be\u03c1\u0003\\\'\u0000"+ + "\u03bf\u03c1\u0003\u00a6L\u0000\u03c0\u03bc\u0001\u0000\u0000\u0000\u03c0"+ + 
"\u03bd\u0001\u0000\u0000\u0000\u03c0\u03be\u0001\u0000\u0000\u0000\u03c0"+ + "\u03bf\u0001\u0000\u0000\u0000\u03c1\u00db\u0001\u0000\u0000\u0000\u03c2"+ + "\u03c5\u0003N \u0000\u03c3\u03c5\u0003\u00a6L\u0000\u03c4\u03c2\u0001"+ + "\u0000\u0000\u0000\u03c4\u03c3\u0001\u0000\u0000\u0000\u03c5\u03c9\u0001"+ + "\u0000\u0000\u0000\u03c6\u03c8\u0003\u00daf\u0000\u03c7\u03c6\u0001\u0000"+ + "\u0000\u0000\u03c8\u03cb\u0001\u0000\u0000\u0000\u03c9\u03c7\u0001\u0000"+ + "\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03d6\u0001\u0000"+ + "\u0000\u0000\u03cb\u03c9\u0001\u0000\u0000\u0000\u03cc\u03cf\u0003\\\'"+ + "\u0000\u03cd\u03cf\u0003V$\u0000\u03ce\u03cc\u0001\u0000\u0000\u0000\u03ce"+ + "\u03cd\u0001\u0000\u0000\u0000\u03cf\u03d1\u0001\u0000\u0000\u0000\u03d0"+ + "\u03d2\u0003\u00daf\u0000\u03d1\u03d0\u0001\u0000\u0000\u0000\u03d2\u03d3"+ + "\u0001\u0000\u0000\u0000\u03d3\u03d1\u0001\u0000\u0000\u0000\u03d3\u03d4"+ + "\u0001\u0000\u0000\u0000\u03d4\u03d6\u0001\u0000\u0000\u0000\u03d5\u03c4"+ + "\u0001\u0000\u0000\u0000\u03d5\u03ce\u0001\u0000\u0000\u0000\u03d6\u00dd"+ + "\u0001\u0000\u0000\u0000\u03d7\u03da\u0003\u00dcg\u0000\u03d8\u03da\u0003"+ + "\u00b2R\u0000\u03d9\u03d7\u0001\u0000\u0000\u0000\u03d9\u03d8\u0001\u0000"+ + "\u0000\u0000\u03da\u03db\u0001\u0000\u0000\u0000\u03db\u03d9\u0001\u0000"+ + "\u0000\u0000\u03db\u03dc\u0001\u0000\u0000\u0000\u03dc\u00df\u0001\u0000"+ + "\u0000\u0000\u03dd\u03de\u00036\u0014\u0000\u03de\u03df\u0001\u0000\u0000"+ + "\u0000\u03df\u03e0\u0006i\n\u0000\u03e0\u00e1\u0001\u0000\u0000\u0000"+ + "\u03e1\u03e2\u00038\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3"+ + "\u03e4\u0006j\n\u0000\u03e4\u00e3\u0001\u0000\u0000\u0000\u03e5\u03e6"+ + "\u0003:\u0016\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006"+ + "k\n\u0000\u03e8\u00e5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003J\u001e"+ + "\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006l\r\u0000"+ + "\u03ec\u03ed\u0006l\u000e\u0000\u03ed\u00e7\u0001\u0000\u0000\u0000\u03ee"+ + "\u03ef\u0003l/\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006"+ + "m\u0011\u0000\u03f1\u00e9\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003p1"+ + "\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006n\u0010\u0000"+ + "\u03f5\u00eb\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003t3\u0000\u03f7\u03f8"+ + "\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006o\u0014\u0000\u03f9\u00ed\u0001"+ + "\u0000\u0000\u0000\u03fa\u03fb\u0005a\u0000\u0000\u03fb\u03fc\u0005s\u0000"+ + "\u0000\u03fc\u00ef\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003\u00deh\u0000"+ + "\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006q\u0015\u0000\u0400"+ + "\u00f1\u0001\u0000\u0000\u0000\u0401\u0402\u00036\u0014\u0000\u0402\u0403"+ + "\u0001\u0000\u0000\u0000\u0403\u0404\u0006r\n\u0000\u0404\u00f3\u0001"+ + "\u0000\u0000\u0000\u0405\u0406\u00038\u0015\u0000\u0406\u0407\u0001\u0000"+ + "\u0000\u0000\u0407\u0408\u0006s\n\u0000\u0408\u00f5\u0001\u0000\u0000"+ + "\u0000\u0409\u040a\u0003:\u0016\u0000\u040a\u040b\u0001\u0000\u0000\u0000"+ + "\u040b\u040c\u0006t\n\u0000\u040c\u00f7\u0001\u0000\u0000\u0000\u040d"+ + "\u040e\u0003J\u001e\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410"+ + "\u0006u\r\u0000\u0410\u0411\u0006u\u000e\u0000\u0411\u00f9\u0001\u0000"+ + "\u0000\u0000\u0412\u0413\u0003\u00acO\u0000\u0413\u0414\u0001\u0000\u0000"+ + "\u0000\u0414\u0415\u0006v\u000b\u0000\u0415\u0416\u0006v\u0016\u0000\u0416"+ + "\u00fb\u0001\u0000\u0000\u0000\u0417\u0418\u0005o\u0000\u0000\u0418\u0419"+ + 
"\u0005n\u0000\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006"+ + "w\u0017\u0000\u041b\u00fd\u0001\u0000\u0000\u0000\u041c\u041d\u0005w\u0000"+ + "\u0000\u041d\u041e\u0005i\u0000\u0000\u041e\u041f\u0005t\u0000\u0000\u041f"+ + "\u0420\u0005h\u0000\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422"+ + "\u0006x\u0017\u0000\u0422\u00ff\u0001\u0000\u0000\u0000\u0423\u0424\b"+ + "\f\u0000\u0000\u0424\u0101\u0001\u0000\u0000\u0000\u0425\u0427\u0003\u0100"+ + "y\u0000\u0426\u0425\u0001\u0000\u0000\u0000\u0427\u0428\u0001\u0000\u0000"+ + "\u0000\u0428\u0426\u0001\u0000\u0000\u0000\u0428\u0429\u0001\u0000\u0000"+ + "\u0000\u0429\u042a\u0001\u0000\u0000\u0000\u042a\u042b\u0003\u0146\u009c"+ + "\u0000\u042b\u042d\u0001\u0000\u0000\u0000\u042c\u0426\u0001\u0000\u0000"+ + "\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042f\u0001\u0000\u0000"+ + "\u0000\u042e\u0430\u0003\u0100y\u0000\u042f\u042e\u0001\u0000\u0000\u0000"+ + "\u0430\u0431\u0001\u0000\u0000\u0000\u0431\u042f\u0001\u0000\u0000\u0000"+ + "\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0103\u0001\u0000\u0000\u0000"+ + "\u0433\u0434\u0003\u00b4S\u0000\u0434\u0435\u0001\u0000\u0000\u0000\u0435"+ + "\u0436\u0006{\u0018\u0000\u0436\u0105\u0001\u0000\u0000\u0000\u0437\u0438"+ + "\u0003\u0102z\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439\u043a\u0006"+ + "|\u0019\u0000\u043a\u0107\u0001\u0000\u0000\u0000\u043b\u043c\u00036\u0014"+ + "\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006}\n\u0000"+ + "\u043e\u0109\u0001\u0000\u0000\u0000\u043f\u0440\u00038\u0015\u0000\u0440"+ + "\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006~\n\u0000\u0442\u010b"+ + "\u0001\u0000\u0000\u0000\u0443\u0444\u0003:\u0016\u0000\u0444\u0445\u0001"+ + "\u0000\u0000\u0000\u0445\u0446\u0006\u007f\n\u0000\u0446\u010d\u0001\u0000"+ + "\u0000\u0000\u0447\u0448\u0003J\u001e\u0000\u0448\u0449\u0001\u0000\u0000"+ + "\u0000\u0449\u044a\u0006\u0080\r\u0000\u044a\u044b\u0006\u0080\u000e\u0000"+ + "\u044b\u044c\u0006\u0080\u000e\u0000\u044c\u010f\u0001\u0000\u0000\u0000"+ + "\u044d\u044e\u0003l/\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f\u0450"+ + "\u0006\u0081\u0011\u0000\u0450\u0111\u0001\u0000\u0000\u0000\u0451\u0452"+ + "\u0003p1\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u0082"+ + "\u0010\u0000\u0454\u0113\u0001\u0000\u0000\u0000\u0455\u0456\u0003t3\u0000"+ + "\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006\u0083\u0014\u0000"+ + "\u0458\u0115\u0001\u0000\u0000\u0000\u0459\u045a\u0003\u00fex\u0000\u045a"+ + "\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006\u0084\u001a\u0000\u045c"+ + "\u0117\u0001\u0000\u0000\u0000\u045d\u045e\u0003\u00deh\u0000\u045e\u045f"+ + "\u0001\u0000\u0000\u0000\u045f\u0460\u0006\u0085\u0015\u0000\u0460\u0119"+ + "\u0001\u0000\u0000\u0000\u0461\u0462\u0003\u00b4S\u0000\u0462\u0463\u0001"+ + "\u0000\u0000\u0000\u0463\u0464\u0006\u0086\u0018\u0000\u0464\u011b\u0001"+ + "\u0000\u0000\u0000\u0465\u0466\u00036\u0014\u0000\u0466\u0467\u0001\u0000"+ + "\u0000\u0000\u0467\u0468\u0006\u0087\n\u0000\u0468\u011d\u0001\u0000\u0000"+ + "\u0000\u0469\u046a\u00038\u0015\u0000\u046a\u046b\u0001\u0000\u0000\u0000"+ + "\u046b\u046c\u0006\u0088\n\u0000\u046c\u011f\u0001\u0000\u0000\u0000\u046d"+ + "\u046e\u0003:\u0016\u0000\u046e\u046f\u0001\u0000\u0000\u0000\u046f\u0470"+ + "\u0006\u0089\n\u0000\u0470\u0121\u0001\u0000\u0000\u0000\u0471\u0472\u0003"+ + "J\u001e\u0000\u0472\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006\u008a"+ + "\r\u0000\u0474\u0475\u0006\u008a\u000e\u0000\u0475\u0123\u0001\u0000\u0000"+ + 
"\u0000\u0476\u0477\u0003t3\u0000\u0477\u0478\u0001\u0000\u0000\u0000\u0478"+ + "\u0479\u0006\u008b\u0014\u0000\u0479\u0125\u0001\u0000\u0000\u0000\u047a"+ + "\u047b\u0003\u00b4S\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c\u047d"+ + "\u0006\u008c\u0018\u0000\u047d\u0127\u0001\u0000\u0000\u0000\u047e\u047f"+ + "\u0003\u00b0Q\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006"+ + "\u008d\u001b\u0000\u0481\u0129\u0001\u0000\u0000\u0000\u0482\u0483\u0003"+ + "6\u0014\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u008e"+ + "\n\u0000\u0485\u012b\u0001\u0000\u0000\u0000\u0486\u0487\u00038\u0015"+ + "\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0489\u0006\u008f\n\u0000"+ + "\u0489\u012d\u0001\u0000\u0000\u0000\u048a\u048b\u0003:\u0016\u0000\u048b"+ + "\u048c\u0001\u0000\u0000\u0000\u048c\u048d\u0006\u0090\n\u0000\u048d\u012f"+ + "\u0001\u0000\u0000\u0000\u048e\u048f\u0003J\u001e\u0000\u048f\u0490\u0001"+ + "\u0000\u0000\u0000\u0490\u0491\u0006\u0091\r\u0000\u0491\u0492\u0006\u0091"+ + "\u000e\u0000\u0492\u0131\u0001\u0000\u0000\u0000\u0493\u0494\u0005i\u0000"+ + "\u0000\u0494\u0495\u0005n\u0000\u0000\u0495\u0496\u0005f\u0000\u0000\u0496"+ + "\u0497\u0005o\u0000\u0000\u0497\u0133\u0001\u0000\u0000\u0000\u0498\u0499"+ + "\u00036\u0014\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b\u0006"+ + "\u0093\n\u0000\u049b\u0135\u0001\u0000\u0000\u0000\u049c\u049d\u00038"+ + "\u0015\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f\u0006\u0094"+ + "\n\u0000\u049f\u0137\u0001\u0000\u0000\u0000\u04a0\u04a1\u0003:\u0016"+ + "\u0000\u04a1\u04a2\u0001\u0000\u0000\u0000\u04a2\u04a3\u0006\u0095\n\u0000"+ + "\u04a3\u0139\u0001\u0000\u0000\u0000\u04a4\u04a5\u0003J\u001e\u0000\u04a5"+ + "\u04a6\u0001\u0000\u0000\u0000\u04a6\u04a7\u0006\u0096\r\u0000\u04a7\u04a8"+ + "\u0006\u0096\u000e\u0000\u04a8\u013b\u0001\u0000\u0000\u0000\u04a9\u04aa"+ + "\u0005f\u0000\u0000\u04aa\u04ab\u0005u\u0000\u0000\u04ab\u04ac\u0005n"+ + "\u0000\u0000\u04ac\u04ad\u0005c\u0000\u0000\u04ad\u04ae\u0005t\u0000\u0000"+ + "\u04ae\u04af\u0005i\u0000\u0000\u04af\u04b0\u0005o\u0000\u0000\u04b0\u04b1"+ + "\u0005n\u0000\u0000\u04b1\u04b2\u0005s\u0000\u0000\u04b2\u013d\u0001\u0000"+ + "\u0000\u0000\u04b3\u04b4\u00036\u0014\u0000\u04b4\u04b5\u0001\u0000\u0000"+ + "\u0000\u04b5\u04b6\u0006\u0098\n\u0000\u04b6\u013f\u0001\u0000\u0000\u0000"+ + "\u04b7\u04b8\u00038\u0015\u0000\u04b8\u04b9\u0001\u0000\u0000\u0000\u04b9"+ + "\u04ba\u0006\u0099\n\u0000\u04ba\u0141\u0001\u0000\u0000\u0000\u04bb\u04bc"+ + "\u0003:\u0016\u0000\u04bc\u04bd\u0001\u0000\u0000\u0000\u04bd\u04be\u0006"+ + "\u009a\n\u0000\u04be\u0143\u0001\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ae"+ + "P\u0000\u04c0\u04c1\u0001\u0000\u0000\u0000\u04c1\u04c2\u0006\u009b\u000f"+ + "\u0000\u04c2\u04c3\u0006\u009b\u000e\u0000\u04c3\u0145\u0001\u0000\u0000"+ + "\u0000\u04c4\u04c5\u0005:\u0000\u0000\u04c5\u0147\u0001\u0000\u0000\u0000"+ + "\u04c6\u04cc\u0003V$\u0000\u04c7\u04cc\u0003L\u001f\u0000\u04c8\u04cc"+ + "\u0003t3\u0000\u04c9\u04cc\u0003N \u0000\u04ca\u04cc\u0003\\\'\u0000\u04cb"+ + "\u04c6\u0001\u0000\u0000\u0000\u04cb\u04c7\u0001\u0000\u0000\u0000\u04cb"+ + "\u04c8\u0001\u0000\u0000\u0000\u04cb\u04c9\u0001\u0000\u0000\u0000\u04cb"+ + "\u04ca\u0001\u0000\u0000\u0000\u04cc\u04cd\u0001\u0000\u0000\u0000\u04cd"+ + "\u04cb\u0001\u0000\u0000\u0000\u04cd\u04ce\u0001\u0000\u0000\u0000\u04ce"+ + "\u0149\u0001\u0000\u0000\u0000\u04cf\u04d0\u00036\u0014\u0000\u04d0\u04d1"+ + "\u0001\u0000\u0000\u0000\u04d1\u04d2\u0006\u009e\n\u0000\u04d2\u014b\u0001"+ + 
"\u0000\u0000\u0000\u04d3\u04d4\u00038\u0015\u0000\u04d4\u04d5\u0001\u0000"+ + "\u0000\u0000\u04d5\u04d6\u0006\u009f\n\u0000\u04d6\u014d\u0001\u0000\u0000"+ + "\u0000\u04d7\u04d8\u0003:\u0016\u0000\u04d8\u04d9\u0001\u0000\u0000\u0000"+ + "\u04d9\u04da\u0006\u00a0\n\u0000\u04da\u014f\u0001\u0000\u0000\u0000\u04db"+ + "\u04dc\u0003J\u001e\u0000\u04dc\u04dd\u0001\u0000\u0000\u0000\u04dd\u04de"+ + "\u0006\u00a1\r\u0000\u04de\u04df\u0006\u00a1\u000e\u0000\u04df\u0151\u0001"+ + "\u0000\u0000\u0000\u04e0\u04e1\u0003>\u0018\u0000\u04e1\u04e2\u0001\u0000"+ + "\u0000\u0000\u04e2\u04e3\u0006\u00a2\u0013\u0000\u04e3\u04e4\u0006\u00a2"+ + "\u000e\u0000\u04e4\u04e5\u0006\u00a2\u001c\u0000\u04e5\u0153\u0001\u0000"+ + "\u0000\u0000\u04e6\u04e7\u00036\u0014\u0000\u04e7\u04e8\u0001\u0000\u0000"+ + "\u0000\u04e8\u04e9\u0006\u00a3\n\u0000\u04e9\u0155\u0001\u0000\u0000\u0000"+ + "\u04ea\u04eb\u00038\u0015\u0000\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec"+ + "\u04ed\u0006\u00a4\n\u0000\u04ed\u0157\u0001\u0000\u0000\u0000\u04ee\u04ef"+ + "\u0003:\u0016\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006"+ + "\u00a5\n\u0000\u04f1\u0159\u0001\u0000\u0000\u0000\u04f2\u04f3\u0003p"+ + "1\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006\u00a6\u0010"+ + "\u0000\u04f5\u04f6\u0006\u00a6\u000e\u0000\u04f6\u04f7\u0006\u00a6\u0006"+ + "\u0000\u04f7\u015b\u0001\u0000\u0000\u0000\u04f8\u04f9\u00036\u0014\u0000"+ + "\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u00a7\n\u0000\u04fb"+ + "\u015d\u0001\u0000\u0000\u0000\u04fc\u04fd\u00038\u0015\u0000\u04fd\u04fe"+ + "\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u00a8\n\u0000\u04ff\u015f\u0001"+ + "\u0000\u0000\u0000\u0500\u0501\u0003:\u0016\u0000\u0501\u0502\u0001\u0000"+ + "\u0000\u0000\u0502\u0503\u0006\u00a9\n\u0000\u0503\u0161\u0001\u0000\u0000"+ + "\u0000\u0504\u0505\u0003\u00b4S\u0000\u0505\u0506\u0001\u0000\u0000\u0000"+ + "\u0506\u0507\u0006\u00aa\u000e\u0000\u0507\u0508\u0006\u00aa\u0000\u0000"+ + "\u0508\u0509\u0006\u00aa\u0018\u0000\u0509\u0163\u0001\u0000\u0000\u0000"+ + "\u050a\u050b\u0003\u00b0Q\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c"+ + "\u050d\u0006\u00ab\u000e\u0000\u050d\u050e\u0006\u00ab\u0000\u0000\u050e"+ + "\u050f\u0006\u00ab\u001b\u0000\u050f\u0165\u0001\u0000\u0000\u0000\u0510"+ + "\u0511\u0003f,\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006"+ + "\u00ac\u000e\u0000\u0513\u0514\u0006\u00ac\u0000\u0000\u0514\u0515\u0006"+ + "\u00ac\u001d\u0000\u0515\u0167\u0001\u0000\u0000\u0000\u0516\u0517\u0003"+ + "J\u001e\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00ad"+ + "\r\u0000\u0519\u051a\u0006\u00ad\u000e\u0000\u051a\u0169\u0001\u0000\u0000"+ + "\u0000<\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u020d\u0217\u021b\u021e\u0227\u0229\u0234\u023b\u0240\u0267\u026c\u0275"+ + "\u027c\u0281\u0283\u028e\u0296\u0299\u029b\u02a0\u02a5\u02ab\u02b2\u02b7"+ + "\u02bd\u02c0\u02c8\u02cc\u0351\u0356\u035b\u035d\u0363\u03c0\u03c4\u03c9"+ + "\u03ce\u03d3\u03d5\u03d9\u03db\u0428\u042c\u0431\u04cb\u04cd\u001e\u0005"+ + "\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005"+ + "\u0003\u0000\u0005\n\u0000\u0005\f\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + "\u0005\t\u0000\u0000\u0001\u0000\u0007C\u0000\u0005\u0000\u0000\u0007"+ + "\u001c\u0000\u0004\u0000\u0000\u0007D\u0000\u0007%\u0000\u0007#\u0000"+ + "\u0007\u001d\u0000\u0007\u0018\u0000\u0007\'\u0000\u0007O\u0000\u0005"+ + "\u000b\u0000\u0005\u0007\u0000\u0007F\u0000\u0007Y\u0000\u0007X\u0000"+ + 
"\u0007E\u0000\u0005\r\u0000\u0007 \u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b4a8e60dd69aa..461605d5f0231 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -11,6 +11,7 @@ null 'keep' 'limit' 'meta' +'metrics' 'mv_expand' 'rename' 'row' @@ -25,6 +26,7 @@ null null null null +null '|' null null @@ -80,7 +82,6 @@ null null null null -null 'as' null null @@ -110,6 +111,12 @@ null null null null +null +null +null +null +null +null token symbolic names: null @@ -124,6 +131,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -135,6 +143,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -186,7 +195,6 @@ EXPR_MULTILINE_COMMENT EXPR_WS OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -223,6 +231,12 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS rule names: singleStatement @@ -241,12 +255,13 @@ rowCommand fields field fromCommand -fromIdentifier +indexIdentifier fromOptions configOption metadata metadataOption deprecated_metadata +metricsCommand evalCommand statsCommand inlinestatsCommand @@ -282,4 +297,4 @@ enrichWithClause atn: -[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 
10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 68, 68, 74, 74, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 
0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 
217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 7, 2, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 
53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 3, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 4, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 5, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 6, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 
1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 7, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file +[4, 1, 117, 562, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 120, 8, 1, 10, 1, 12, 1, 123, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 131, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 146, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 158, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 165, 8, 5, 10, 5, 12, 5, 168, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 3, 5, 179, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 187, 8, 5, 10, 5, 12, 5, 190, 9, 5, 1, 6, 1, 6, 3, 6, 194, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 206, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 213, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 219, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 227, 8, 8, 10, 8, 12, 8, 230, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 240, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 245, 8, 9, 10, 9, 12, 9, 248, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 256, 8, 10, 10, 10, 12, 10, 259, 9, 10, 3, 10, 261, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 273, 8, 13, 10, 13, 12, 13, 276, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 283, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 289, 8, 15, 10, 15, 12, 15, 292, 9, 15, 1, 15, 3, 15, 295, 8, 15, 1, 15, 
3, 15, 298, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 306, 8, 17, 10, 17, 12, 17, 309, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 317, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 323, 8, 20, 10, 20, 12, 20, 326, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 336, 8, 22, 10, 22, 12, 22, 339, 9, 22, 1, 22, 3, 22, 342, 8, 22, 1, 22, 1, 22, 3, 22, 346, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 353, 8, 24, 1, 24, 1, 24, 3, 24, 357, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 363, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 376, 8, 27, 10, 27, 12, 27, 379, 9, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 398, 8, 30, 10, 30, 12, 30, 401, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 409, 8, 30, 10, 30, 12, 30, 412, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 420, 8, 30, 10, 30, 12, 30, 423, 9, 30, 1, 30, 1, 30, 3, 30, 427, 8, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 436, 8, 32, 10, 32, 12, 32, 439, 9, 32, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 33, 1, 33, 3, 33, 447, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 453, 8, 34, 10, 34, 12, 34, 456, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 462, 8, 35, 10, 35, 12, 35, 465, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 471, 8, 36, 10, 36, 12, 36, 474, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 484, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 496, 8, 41, 10, 41, 12, 41, 499, 9, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 3, 44, 509, 8, 44, 1, 45, 3, 45, 512, 8, 45, 1, 45, 1, 45, 1, 46, 3, 46, 517, 8, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 542, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 5, 53, 548, 8, 53, 10, 53, 12, 53, 551, 9, 53, 3, 53, 553, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 558, 8, 54, 1, 54, 1, 54, 1, 54, 0, 4, 2, 10, 16, 18, 55, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 0, 7, 1, 0, 62, 63, 1, 0, 64, 66, 1, 0, 69, 70, 2, 0, 34, 34, 38, 38, 1, 0, 41, 42, 2, 0, 40, 40, 54, 54, 2, 0, 55, 55, 57, 61, 590, 0, 110, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 4, 130, 1, 0, 0, 0, 6, 145, 1, 0, 0, 0, 8, 147, 1, 0, 0, 0, 10, 178, 1, 0, 0, 0, 12, 205, 1, 0, 0, 0, 14, 212, 1, 0, 0, 0, 16, 218, 1, 0, 0, 0, 18, 239, 1, 0, 0, 0, 20, 249, 1, 0, 0, 0, 22, 264, 1, 0, 0, 0, 24, 266, 1, 0, 0, 0, 26, 269, 1, 0, 0, 0, 28, 282, 1, 0, 0, 0, 30, 284, 1, 0, 0, 0, 32, 299, 1, 0, 0, 0, 34, 301, 1, 0, 0, 0, 36, 310, 1, 0, 0, 0, 38, 316, 1, 0, 0, 0, 40, 318, 1, 0, 0, 0, 42, 327, 1, 0, 0, 0, 44, 331, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 350, 1, 0, 0, 0, 50, 358, 1, 0, 0, 0, 52, 364, 1, 0, 0, 0, 54, 372, 1, 0, 0, 0, 56, 380, 1, 0, 0, 0, 58, 382, 1, 0, 0, 0, 60, 426, 1, 0, 0, 0, 62, 428, 1, 0, 0, 0, 64, 431, 1, 0, 0, 0, 66, 440, 1, 0, 0, 0, 68, 448, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 466, 1, 0, 0, 0, 74, 475, 1, 0, 0, 0, 76, 479, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 489, 1, 0, 0, 0, 82, 492, 1, 0, 0, 0, 84, 500, 1, 0, 0, 0, 86, 504, 1, 0, 0, 0, 88, 508, 1, 0, 0, 0, 90, 511, 1, 0, 0, 0, 92, 516, 1, 0, 0, 0, 94, 520, 1, 0, 0, 0, 96, 522, 1, 0, 0, 0, 98, 524, 1, 0, 0, 0, 100, 527, 1, 0, 0, 0, 
102, 531, 1, 0, 0, 0, 104, 534, 1, 0, 0, 0, 106, 537, 1, 0, 0, 0, 108, 557, 1, 0, 0, 0, 110, 111, 3, 2, 1, 0, 111, 112, 5, 0, 0, 1, 112, 1, 1, 0, 0, 0, 113, 114, 6, 1, -1, 0, 114, 115, 3, 4, 2, 0, 115, 121, 1, 0, 0, 0, 116, 117, 10, 1, 0, 0, 117, 118, 5, 28, 0, 0, 118, 120, 3, 6, 3, 0, 119, 116, 1, 0, 0, 0, 120, 123, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 3, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 124, 131, 3, 98, 49, 0, 125, 131, 3, 30, 15, 0, 126, 131, 3, 24, 12, 0, 127, 131, 3, 44, 22, 0, 128, 131, 3, 102, 51, 0, 129, 131, 3, 104, 52, 0, 130, 124, 1, 0, 0, 0, 130, 125, 1, 0, 0, 0, 130, 126, 1, 0, 0, 0, 130, 127, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130, 129, 1, 0, 0, 0, 131, 5, 1, 0, 0, 0, 132, 146, 3, 46, 23, 0, 133, 146, 3, 50, 25, 0, 134, 146, 3, 62, 31, 0, 135, 146, 3, 68, 34, 0, 136, 146, 3, 64, 32, 0, 137, 146, 3, 48, 24, 0, 138, 146, 3, 8, 4, 0, 139, 146, 3, 70, 35, 0, 140, 146, 3, 72, 36, 0, 141, 146, 3, 76, 38, 0, 142, 146, 3, 78, 39, 0, 143, 146, 3, 106, 53, 0, 144, 146, 3, 80, 40, 0, 145, 132, 1, 0, 0, 0, 145, 133, 1, 0, 0, 0, 145, 134, 1, 0, 0, 0, 145, 135, 1, 0, 0, 0, 145, 136, 1, 0, 0, 0, 145, 137, 1, 0, 0, 0, 145, 138, 1, 0, 0, 0, 145, 139, 1, 0, 0, 0, 145, 140, 1, 0, 0, 0, 145, 141, 1, 0, 0, 0, 145, 142, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 145, 144, 1, 0, 0, 0, 146, 7, 1, 0, 0, 0, 147, 148, 5, 19, 0, 0, 148, 149, 3, 10, 5, 0, 149, 9, 1, 0, 0, 0, 150, 151, 6, 5, -1, 0, 151, 152, 5, 47, 0, 0, 152, 179, 3, 10, 5, 7, 153, 179, 3, 14, 7, 0, 154, 179, 3, 12, 6, 0, 155, 157, 3, 14, 7, 0, 156, 158, 5, 47, 0, 0, 157, 156, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 160, 5, 44, 0, 0, 160, 161, 5, 43, 0, 0, 161, 166, 3, 14, 7, 0, 162, 163, 5, 37, 0, 0, 163, 165, 3, 14, 7, 0, 164, 162, 1, 0, 0, 0, 165, 168, 1, 0, 0, 0, 166, 164, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 169, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 170, 5, 53, 0, 0, 170, 179, 1, 0, 0, 0, 171, 172, 3, 14, 7, 0, 172, 174, 5, 45, 0, 0, 173, 175, 5, 47, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 48, 0, 0, 177, 179, 1, 0, 0, 0, 178, 150, 1, 0, 0, 0, 178, 153, 1, 0, 0, 0, 178, 154, 1, 0, 0, 0, 178, 155, 1, 0, 0, 0, 178, 171, 1, 0, 0, 0, 179, 188, 1, 0, 0, 0, 180, 181, 10, 4, 0, 0, 181, 182, 5, 33, 0, 0, 182, 187, 3, 10, 5, 5, 183, 184, 10, 3, 0, 0, 184, 185, 5, 50, 0, 0, 185, 187, 3, 10, 5, 4, 186, 180, 1, 0, 0, 0, 186, 183, 1, 0, 0, 0, 187, 190, 1, 0, 0, 0, 188, 186, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 11, 1, 0, 0, 0, 190, 188, 1, 0, 0, 0, 191, 193, 3, 14, 7, 0, 192, 194, 5, 47, 0, 0, 193, 192, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 195, 1, 0, 0, 0, 195, 196, 5, 46, 0, 0, 196, 197, 3, 94, 47, 0, 197, 206, 1, 0, 0, 0, 198, 200, 3, 14, 7, 0, 199, 201, 5, 47, 0, 0, 200, 199, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 5, 52, 0, 0, 203, 204, 3, 94, 47, 0, 204, 206, 1, 0, 0, 0, 205, 191, 1, 0, 0, 0, 205, 198, 1, 0, 0, 0, 206, 13, 1, 0, 0, 0, 207, 213, 3, 16, 8, 0, 208, 209, 3, 16, 8, 0, 209, 210, 3, 96, 48, 0, 210, 211, 3, 16, 8, 0, 211, 213, 1, 0, 0, 0, 212, 207, 1, 0, 0, 0, 212, 208, 1, 0, 0, 0, 213, 15, 1, 0, 0, 0, 214, 215, 6, 8, -1, 0, 215, 219, 3, 18, 9, 0, 216, 217, 7, 0, 0, 0, 217, 219, 3, 16, 8, 3, 218, 214, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 228, 1, 0, 0, 0, 220, 221, 10, 2, 0, 0, 221, 222, 7, 1, 0, 0, 222, 227, 3, 16, 8, 3, 223, 224, 10, 1, 0, 0, 224, 225, 7, 0, 0, 0, 225, 227, 3, 16, 8, 2, 226, 220, 1, 0, 0, 0, 226, 223, 1, 0, 0, 0, 227, 230, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 17, 1, 0, 0, 0, 
230, 228, 1, 0, 0, 0, 231, 232, 6, 9, -1, 0, 232, 240, 3, 60, 30, 0, 233, 240, 3, 52, 26, 0, 234, 240, 3, 20, 10, 0, 235, 236, 5, 43, 0, 0, 236, 237, 3, 10, 5, 0, 237, 238, 5, 53, 0, 0, 238, 240, 1, 0, 0, 0, 239, 231, 1, 0, 0, 0, 239, 233, 1, 0, 0, 0, 239, 234, 1, 0, 0, 0, 239, 235, 1, 0, 0, 0, 240, 246, 1, 0, 0, 0, 241, 242, 10, 1, 0, 0, 242, 243, 5, 36, 0, 0, 243, 245, 3, 22, 11, 0, 244, 241, 1, 0, 0, 0, 245, 248, 1, 0, 0, 0, 246, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 19, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 249, 250, 3, 56, 28, 0, 250, 260, 5, 43, 0, 0, 251, 261, 5, 64, 0, 0, 252, 257, 3, 10, 5, 0, 253, 254, 5, 37, 0, 0, 254, 256, 3, 10, 5, 0, 255, 253, 1, 0, 0, 0, 256, 259, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 261, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 260, 251, 1, 0, 0, 0, 260, 252, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 5, 53, 0, 0, 263, 21, 1, 0, 0, 0, 264, 265, 3, 56, 28, 0, 265, 23, 1, 0, 0, 0, 266, 267, 5, 15, 0, 0, 267, 268, 3, 26, 13, 0, 268, 25, 1, 0, 0, 0, 269, 274, 3, 28, 14, 0, 270, 271, 5, 37, 0, 0, 271, 273, 3, 28, 14, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 27, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 283, 3, 10, 5, 0, 278, 279, 3, 52, 26, 0, 279, 280, 5, 35, 0, 0, 280, 281, 3, 10, 5, 0, 281, 283, 1, 0, 0, 0, 282, 277, 1, 0, 0, 0, 282, 278, 1, 0, 0, 0, 283, 29, 1, 0, 0, 0, 284, 285, 5, 6, 0, 0, 285, 290, 3, 32, 16, 0, 286, 287, 5, 37, 0, 0, 287, 289, 3, 32, 16, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 295, 3, 38, 19, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 297, 1, 0, 0, 0, 296, 298, 3, 34, 17, 0, 297, 296, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 300, 5, 24, 0, 0, 300, 33, 1, 0, 0, 0, 301, 302, 5, 74, 0, 0, 302, 307, 3, 36, 18, 0, 303, 304, 5, 37, 0, 0, 304, 306, 3, 36, 18, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 35, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 311, 3, 94, 47, 0, 311, 312, 5, 35, 0, 0, 312, 313, 3, 94, 47, 0, 313, 37, 1, 0, 0, 0, 314, 317, 3, 40, 20, 0, 315, 317, 3, 42, 21, 0, 316, 314, 1, 0, 0, 0, 316, 315, 1, 0, 0, 0, 317, 39, 1, 0, 0, 0, 318, 319, 5, 75, 0, 0, 319, 324, 3, 32, 16, 0, 320, 321, 5, 37, 0, 0, 321, 323, 3, 32, 16, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 41, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 328, 5, 67, 0, 0, 328, 329, 3, 40, 20, 0, 329, 330, 5, 68, 0, 0, 330, 43, 1, 0, 0, 0, 331, 332, 5, 12, 0, 0, 332, 337, 3, 32, 16, 0, 333, 334, 5, 37, 0, 0, 334, 336, 3, 32, 16, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 342, 3, 26, 13, 0, 341, 340, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 345, 1, 0, 0, 0, 343, 344, 5, 32, 0, 0, 344, 346, 3, 26, 13, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 45, 1, 0, 0, 0, 347, 348, 5, 4, 0, 0, 348, 349, 3, 26, 13, 0, 349, 47, 1, 0, 0, 0, 350, 352, 5, 18, 0, 0, 351, 353, 3, 26, 13, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 32, 0, 0, 355, 357, 3, 26, 13, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 49, 1, 0, 0, 0, 358, 359, 5, 8, 0, 0, 359, 362, 3, 26, 13, 0, 360, 361, 5, 32, 0, 0, 361, 363, 3, 26, 13, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 51, 1, 0, 0, 0, 364, 369, 3, 56, 28, 0, 365, 366, 5, 39, 0, 0, 366, 368, 3, 56, 
28, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 53, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 377, 3, 58, 29, 0, 373, 374, 5, 39, 0, 0, 374, 376, 3, 58, 29, 0, 375, 373, 1, 0, 0, 0, 376, 379, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 55, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 380, 381, 7, 2, 0, 0, 381, 57, 1, 0, 0, 0, 382, 383, 5, 79, 0, 0, 383, 59, 1, 0, 0, 0, 384, 427, 5, 48, 0, 0, 385, 386, 3, 92, 46, 0, 386, 387, 5, 69, 0, 0, 387, 427, 1, 0, 0, 0, 388, 427, 3, 90, 45, 0, 389, 427, 3, 92, 46, 0, 390, 427, 3, 86, 43, 0, 391, 427, 5, 51, 0, 0, 392, 427, 3, 94, 47, 0, 393, 394, 5, 67, 0, 0, 394, 399, 3, 88, 44, 0, 395, 396, 5, 37, 0, 0, 396, 398, 3, 88, 44, 0, 397, 395, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 403, 5, 68, 0, 0, 403, 427, 1, 0, 0, 0, 404, 405, 5, 67, 0, 0, 405, 410, 3, 86, 43, 0, 406, 407, 5, 37, 0, 0, 407, 409, 3, 86, 43, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 413, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 5, 68, 0, 0, 414, 427, 1, 0, 0, 0, 415, 416, 5, 67, 0, 0, 416, 421, 3, 94, 47, 0, 417, 418, 5, 37, 0, 0, 418, 420, 3, 94, 47, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 424, 425, 5, 68, 0, 0, 425, 427, 1, 0, 0, 0, 426, 384, 1, 0, 0, 0, 426, 385, 1, 0, 0, 0, 426, 388, 1, 0, 0, 0, 426, 389, 1, 0, 0, 0, 426, 390, 1, 0, 0, 0, 426, 391, 1, 0, 0, 0, 426, 392, 1, 0, 0, 0, 426, 393, 1, 0, 0, 0, 426, 404, 1, 0, 0, 0, 426, 415, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 429, 5, 10, 0, 0, 429, 430, 5, 30, 0, 0, 430, 63, 1, 0, 0, 0, 431, 432, 5, 17, 0, 0, 432, 437, 3, 66, 33, 0, 433, 434, 5, 37, 0, 0, 434, 436, 3, 66, 33, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 65, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 442, 3, 10, 5, 0, 441, 443, 7, 3, 0, 0, 442, 441, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 445, 5, 49, 0, 0, 445, 447, 7, 4, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 67, 1, 0, 0, 0, 448, 449, 5, 9, 0, 0, 449, 454, 3, 54, 27, 0, 450, 451, 5, 37, 0, 0, 451, 453, 3, 54, 27, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 458, 5, 2, 0, 0, 458, 463, 3, 54, 27, 0, 459, 460, 5, 37, 0, 0, 460, 462, 3, 54, 27, 0, 461, 459, 1, 0, 0, 0, 462, 465, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 71, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 466, 467, 5, 14, 0, 0, 467, 472, 3, 74, 37, 0, 468, 469, 5, 37, 0, 0, 469, 471, 3, 74, 37, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 73, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 3, 54, 27, 0, 476, 477, 5, 83, 0, 0, 477, 478, 3, 54, 27, 0, 478, 75, 1, 0, 0, 0, 479, 480, 5, 1, 0, 0, 480, 481, 3, 18, 9, 0, 481, 483, 3, 94, 47, 0, 482, 484, 3, 82, 41, 0, 483, 482, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 77, 1, 0, 0, 0, 485, 486, 5, 7, 0, 0, 486, 487, 3, 18, 9, 0, 487, 488, 3, 94, 47, 0, 488, 79, 1, 0, 0, 0, 489, 490, 5, 13, 0, 0, 490, 491, 3, 52, 26, 0, 491, 81, 1, 0, 0, 0, 492, 497, 3, 84, 42, 0, 493, 494, 5, 37, 0, 0, 494, 496, 3, 84, 42, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 83, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 501, 3, 56, 28, 0, 501, 502, 5, 35, 0, 0, 502, 503, 3, 60, 
30, 0, 503, 85, 1, 0, 0, 0, 504, 505, 7, 5, 0, 0, 505, 87, 1, 0, 0, 0, 506, 509, 3, 90, 45, 0, 507, 509, 3, 92, 46, 0, 508, 506, 1, 0, 0, 0, 508, 507, 1, 0, 0, 0, 509, 89, 1, 0, 0, 0, 510, 512, 7, 0, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 5, 31, 0, 0, 514, 91, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 30, 0, 0, 519, 93, 1, 0, 0, 0, 520, 521, 5, 29, 0, 0, 521, 95, 1, 0, 0, 0, 522, 523, 7, 6, 0, 0, 523, 97, 1, 0, 0, 0, 524, 525, 5, 5, 0, 0, 525, 526, 3, 100, 50, 0, 526, 99, 1, 0, 0, 0, 527, 528, 5, 67, 0, 0, 528, 529, 3, 2, 1, 0, 529, 530, 5, 68, 0, 0, 530, 101, 1, 0, 0, 0, 531, 532, 5, 16, 0, 0, 532, 533, 5, 99, 0, 0, 533, 103, 1, 0, 0, 0, 534, 535, 5, 11, 0, 0, 535, 536, 5, 103, 0, 0, 536, 105, 1, 0, 0, 0, 537, 538, 5, 3, 0, 0, 538, 541, 5, 89, 0, 0, 539, 540, 5, 87, 0, 0, 540, 542, 3, 54, 27, 0, 541, 539, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 552, 1, 0, 0, 0, 543, 544, 5, 88, 0, 0, 544, 549, 3, 108, 54, 0, 545, 546, 5, 37, 0, 0, 546, 548, 3, 108, 54, 0, 547, 545, 1, 0, 0, 0, 548, 551, 1, 0, 0, 0, 549, 547, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 553, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 552, 543, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 107, 1, 0, 0, 0, 554, 555, 3, 54, 27, 0, 555, 556, 5, 35, 0, 0, 556, 558, 1, 0, 0, 0, 557, 554, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 109, 1, 0, 0, 0, 55, 121, 130, 145, 157, 166, 174, 178, 186, 188, 193, 200, 205, 212, 218, 226, 228, 239, 246, 257, 260, 274, 282, 290, 294, 297, 307, 316, 324, 337, 341, 345, 352, 356, 362, 369, 377, 399, 410, 421, 426, 437, 442, 446, 454, 463, 472, 483, 497, 508, 511, 516, 541, 549, 552, 557] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1f9c13c16cdd4..7cf25b86ded5c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -18,58 +18,60 @@ public class EsqlBaseParser extends Parser { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, - STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, - FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, - OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, - METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, - FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, - PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, - RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, - ENRICH_MULTILINE_COMMENT=90, 
ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, - ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, - MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, - SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, - META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, - SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; + KEEP=9, LIMIT=10, META=11, METRICS=12, MV_EXPAND=13, RENAME=14, ROW=15, + SHOW=16, SORT=17, STATS=18, WHERE=19, UNKNOWN_CMD=20, LINE_COMMENT=21, + MULTILINE_COMMENT=22, WS=23, INDEX_UNQUOTED_IDENTIFIER=24, EXPLAIN_WS=25, + EXPLAIN_LINE_COMMENT=26, EXPLAIN_MULTILINE_COMMENT=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COMMA=37, DESC=38, DOT=39, FALSE=40, FIRST=41, LAST=42, LP=43, + IN=44, IS=45, LIKE=46, NOT=47, NULL=48, NULLS=49, OR=50, PARAM=51, RLIKE=52, + RP=53, TRUE=54, EQ=55, CIEQ=56, NEQ=57, LT=58, LTE=59, GT=60, GTE=61, + PLUS=62, MINUS=63, ASTERISK=64, SLASH=65, PERCENT=66, OPENING_BRACKET=67, + CLOSING_BRACKET=68, UNQUOTED_IDENTIFIER=69, QUOTED_IDENTIFIER=70, EXPR_LINE_COMMENT=71, + EXPR_MULTILINE_COMMENT=72, EXPR_WS=73, OPTIONS=74, METADATA=75, FROM_LINE_COMMENT=76, + FROM_MULTILINE_COMMENT=77, FROM_WS=78, ID_PATTERN=79, PROJECT_LINE_COMMENT=80, + PROJECT_MULTILINE_COMMENT=81, PROJECT_WS=82, AS=83, RENAME_LINE_COMMENT=84, + RENAME_MULTILINE_COMMENT=85, RENAME_WS=86, ON=87, WITH=88, ENRICH_POLICY_NAME=89, + ENRICH_LINE_COMMENT=90, ENRICH_MULTILINE_COMMENT=91, ENRICH_WS=92, ENRICH_FIELD_LINE_COMMENT=93, + ENRICH_FIELD_MULTILINE_COMMENT=94, ENRICH_FIELD_WS=95, MVEXPAND_LINE_COMMENT=96, + MVEXPAND_MULTILINE_COMMENT=97, MVEXPAND_WS=98, INFO=99, SHOW_LINE_COMMENT=100, + SHOW_MULTILINE_COMMENT=101, SHOW_WS=102, FUNCTIONS=103, META_LINE_COMMENT=104, + META_MULTILINE_COMMENT=105, META_WS=106, COLON=107, SETTING=108, SETTING_LINE_COMMENT=109, + SETTTING_MULTILINE_COMMENT=110, SETTING_WS=111, METRICS_LINE_COMMENT=112, + METRICS_MULTILINE_COMMENT=113, METRICS_WS=114, CLOSING_METRICS_LINE_COMMENT=115, + CLOSING_METRICS_MULTILINE_COMMENT=116, CLOSING_METRICS_WS=117; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, RULE_functionExpression = 10, RULE_dataType = 11, RULE_rowCommand = 12, - RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_fromIdentifier = 16, + RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_indexIdentifier = 16, RULE_fromOptions = 17, RULE_configOption = 18, RULE_metadata = 19, RULE_metadataOption = 20, - RULE_deprecated_metadata = 21, RULE_evalCommand = 22, RULE_statsCommand = 23, - RULE_inlinestatsCommand = 24, RULE_qualifiedName = 25, RULE_qualifiedNamePattern = 26, - RULE_identifier = 27, RULE_identifierPattern = 28, RULE_constant = 29, - RULE_limitCommand = 30, RULE_sortCommand = 31, RULE_orderExpression = 32, - RULE_keepCommand = 33, RULE_dropCommand = 34, RULE_renameCommand = 35, - RULE_renameClause = 36, RULE_dissectCommand = 37, RULE_grokCommand = 38, - RULE_mvExpandCommand = 39, RULE_commandOptions = 40, RULE_commandOption = 41, - RULE_booleanValue = 42, RULE_numericValue = 43, RULE_decimalValue = 44, - RULE_integerValue = 45, RULE_string = 46, RULE_comparisonOperator = 47, - RULE_explainCommand = 48, 
RULE_subqueryExpression = 49, RULE_showCommand = 50, - RULE_metaCommand = 51, RULE_enrichCommand = 52, RULE_enrichWithClause = 53; + RULE_deprecated_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, + RULE_statsCommand = 24, RULE_inlinestatsCommand = 25, RULE_qualifiedName = 26, + RULE_qualifiedNamePattern = 27, RULE_identifier = 28, RULE_identifierPattern = 29, + RULE_constant = 30, RULE_limitCommand = 31, RULE_sortCommand = 32, RULE_orderExpression = 33, + RULE_keepCommand = 34, RULE_dropCommand = 35, RULE_renameCommand = 36, + RULE_renameClause = 37, RULE_dissectCommand = 38, RULE_grokCommand = 39, + RULE_mvExpandCommand = 40, RULE_commandOptions = 41, RULE_commandOption = 42, + RULE_booleanValue = 43, RULE_numericValue = 44, RULE_decimalValue = 45, + RULE_integerValue = 46, RULE_string = 47, RULE_comparisonOperator = 48, + RULE_explainCommand = 49, RULE_subqueryExpression = 50, RULE_showCommand = 51, + RULE_metaCommand = 52, RULE_enrichCommand = 53, RULE_enrichWithClause = 54; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "dataType", "rowCommand", - "fields", "field", "fromCommand", "fromIdentifier", "fromOptions", "configOption", - "metadata", "metadataOption", "deprecated_metadata", "evalCommand", "statsCommand", - "inlinestatsCommand", "qualifiedName", "qualifiedNamePattern", "identifier", - "identifierPattern", "constant", "limitCommand", "sortCommand", "orderExpression", - "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", - "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", - "booleanValue", "numericValue", "decimalValue", "integerValue", "string", - "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", - "metaCommand", "enrichCommand", "enrichWithClause" + "fields", "field", "fromCommand", "indexIdentifier", "fromOptions", "configOption", + "metadata", "metadataOption", "deprecated_metadata", "metricsCommand", + "evalCommand", "statsCommand", "inlinestatsCommand", "qualifiedName", + "qualifiedNamePattern", "identifier", "identifierPattern", "constant", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "metaCommand", "enrichCommand", "enrichWithClause" }; } public static final String[] ruleNames = makeRuleNames(); @@ -77,15 +79,15 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, 
"'options'", "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'metrics'", + "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", + null, null, null, null, null, null, null, null, "'|'", null, null, null, + "'by'", "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", + "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + null, "']'", null, null, null, null, null, "'options'", "'metadata'", + null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" }; @@ -94,25 +96,28 @@ private static String[] makeLiteralNames() { private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", + "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "OPTIONS", "METADATA", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", 
"SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -199,9 +204,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(108); + setState(110); query(0); - setState(109); + setState(111); match(EOF); } } @@ -297,11 +302,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(112); + setState(114); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(119); + setState(121); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -312,16 +317,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(114); + setState(116); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(115); + setState(117); match(PIPE); - setState(116); + setState(118); processingCommand(); } } } - setState(121); + setState(123); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -349,6 +354,9 @@ public FromCommandContext fromCommand() { public RowCommandContext rowCommand() { return getRuleContext(RowCommandContext.class,0); } + public MetricsCommandContext metricsCommand() { + return getRuleContext(MetricsCommandContext.class,0); + } public ShowCommandContext showCommand() { return getRuleContext(ShowCommandContext.class,0); } @@ -379,41 +387,48 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(127); + setState(130); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(122); + setState(124); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(123); + setState(125); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(124); + setState(126); rowCommand(); } break; - case SHOW: + case METRICS: enterOuterAlt(_localctx, 4); { - setState(125); + setState(127); + metricsCommand(); + } + break; + case SHOW: + enterOuterAlt(_localctx, 5); + { + setState(128); showCommand(); } break; case META: - enterOuterAlt(_localctx, 5); + enterOuterAlt(_localctx, 6); { - setState(126); + setState(129); metaCommand(); } break; @@ -497,97 +512,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(142); + setState(145); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(129); + setState(132); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(130); + setState(133); inlinestatsCommand(); } break; 
case LIMIT: enterOuterAlt(_localctx, 3); { - setState(131); + setState(134); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(132); + setState(135); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(133); + setState(136); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(134); + setState(137); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(135); + setState(138); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(136); + setState(139); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(137); + setState(140); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(138); + setState(141); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(139); + setState(142); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(140); + setState(143); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(141); + setState(144); mvExpandCommand(); } break; @@ -638,9 +653,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(144); + setState(147); match(WHERE); - setState(145); + setState(148); booleanExpression(0); } } @@ -835,7 +850,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(175); + setState(178); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -844,9 +859,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(148); + setState(151); match(NOT); - setState(149); + setState(152); booleanExpression(7); } break; @@ -855,7 +870,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(150); + setState(153); valueExpression(); } break; @@ -864,7 +879,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(151); + setState(154); regexBooleanExpression(); } break; @@ -873,41 +888,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(152); + setState(155); valueExpression(); - setState(154); + setState(157); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(153); + setState(156); match(NOT); } } - setState(156); + setState(159); match(IN); - setState(157); + setState(160); match(LP); - setState(158); + setState(161); valueExpression(); - setState(163); + setState(166); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(159); + setState(162); match(COMMA); - setState(160); + setState(163); valueExpression(); } } - setState(165); + setState(168); _errHandler.sync(this); _la = _input.LA(1); } - setState(166); + setState(169); match(RP); } break; @@ -916,27 +931,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(171); valueExpression(); - setState(169); + setState(172); 
match(IS); - setState(171); + setState(174); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(170); + setState(173); match(NOT); } } - setState(173); + setState(176); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(185); + setState(188); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -944,7 +959,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(183); + setState(186); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -952,11 +967,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(177); + setState(180); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(178); + setState(181); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(179); + setState(182); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -965,18 +980,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(180); + setState(183); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(181); + setState(184); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(182); + setState(185); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(187); + setState(190); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1031,48 +1046,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(202); + setState(205); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(188); + setState(191); valueExpression(); - setState(190); + setState(193); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(189); + setState(192); match(NOT); } } - setState(192); + setState(195); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(193); + setState(196); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(195); + setState(198); valueExpression(); - setState(197); + setState(200); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(196); + setState(199); match(NOT); } } - setState(199); + setState(202); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(200); + setState(203); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1158,14 +1173,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); 
try { - setState(209); + setState(212); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(204); + setState(207); operatorExpression(0); } break; @@ -1173,11 +1188,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(205); + setState(208); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(206); + setState(209); comparisonOperator(); - setState(207); + setState(210); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1302,7 +1317,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(215); + setState(218); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1311,7 +1326,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(212); + setState(215); primaryExpression(0); } break; @@ -1320,7 +1335,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(213); + setState(216); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1331,13 +1346,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(214); + setState(217); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(225); + setState(228); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1345,7 +1360,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(223); + setState(226); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1353,12 +1368,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(217); + setState(220); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(218); + setState(221); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0)) ) { + if ( !(((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1366,7 +1381,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(219); + setState(222); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1375,9 +1390,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new 
OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(220); + setState(223); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(221); + setState(224); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1388,14 +1403,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(222); + setState(225); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(227); + setState(230); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1553,7 +1568,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(236); + setState(239); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1562,7 +1577,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(229); + setState(232); constant(); } break; @@ -1571,7 +1586,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(230); + setState(233); qualifiedName(); } break; @@ -1580,7 +1595,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(231); + setState(234); functionExpression(); } break; @@ -1589,17 +1604,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(232); + setState(235); match(LP); - setState(233); + setState(236); booleanExpression(0); - setState(234); + setState(237); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(243); + setState(246); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1610,16 +1625,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(238); + setState(241); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(239); + setState(242); match(CAST_OP); - setState(240); + setState(243); dataType(); } } } - setState(245); + setState(248); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1681,16 +1696,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(246); + setState(249); identifier(); - setState(247); + setState(250); match(LP); - setState(257); + setState(260); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(248); + setState(251); match(ASTERISK); } break; @@ -1710,21 +1725,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(249); + 
setState(252); booleanExpression(0); - setState(254); + setState(257); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(250); + setState(253); match(COMMA); - setState(251); + setState(254); booleanExpression(0); } } - setState(256); + setState(259); _errHandler.sync(this); _la = _input.LA(1); } @@ -1736,7 +1751,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(259); + setState(262); match(RP); } } @@ -1794,7 +1809,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(261); + setState(264); identifier(); } } @@ -1841,9 +1856,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(263); + setState(266); match(ROW); - setState(264); + setState(267); fields(); } } @@ -1897,23 +1912,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(266); + setState(269); field(); - setState(271); + setState(274); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(267); + setState(270); match(COMMA); - setState(268); + setState(271); field(); } } } - setState(273); + setState(276); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -1963,24 +1978,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 28, RULE_field); try { - setState(279); + setState(282); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(274); + setState(277); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(275); + setState(278); qualifiedName(); - setState(276); + setState(279); match(ASSIGN); - setState(277); + setState(280); booleanExpression(0); } break; @@ -2000,11 +2015,11 @@ public final FieldContext field() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromCommandContext extends ParserRuleContext { public TerminalNode FROM() { return getToken(EsqlBaseParser.FROM, 0); } - public List fromIdentifier() { - return getRuleContexts(FromIdentifierContext.class); + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); } - public FromIdentifierContext fromIdentifier(int i) { - return getRuleContext(FromIdentifierContext.class,i); + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2043,44 +2058,44 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(281); + setState(284); match(FROM); - setState(282); - fromIdentifier(); - setState(287); + setState(285); + indexIdentifier(); + setState(290); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(283); + setState(286); match(COMMA); - setState(284); - fromIdentifier(); + setState(287); + indexIdentifier(); } } } - 
setState(289); + setState(292); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(291); + setState(294); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(290); + setState(293); metadata(); } break; } - setState(294); + setState(297); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(293); + setState(296); fromOptions(); } break; @@ -2099,46 +2114,36 @@ public final FromCommandContext fromCommand() throws RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class FromIdentifierContext extends ParserRuleContext { - public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } + public static class IndexIdentifierContext extends ParserRuleContext { + public TerminalNode INDEX_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.INDEX_UNQUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") - public FromIdentifierContext(ParserRuleContext parent, int invokingState) { + public IndexIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_fromIdentifier; } + @Override public int getRuleIndex() { return RULE_indexIdentifier; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIndexIdentifier(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIndexIdentifier(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromIdentifier(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIndexIdentifier(this); else return visitor.visitChildren(this); } } - public final FromIdentifierContext fromIdentifier() throws RecognitionException { - FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_fromIdentifier); - int _la; + public final IndexIdentifierContext indexIdentifier() throws RecognitionException { + IndexIdentifierContext _localctx = new IndexIdentifierContext(_ctx, getState()); + enterRule(_localctx, 32, RULE_indexIdentifier); try { enterOuterAlt(_localctx, 1); { - setState(296); - _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } + setState(299); + match(INDEX_UNQUOTED_IDENTIFIER); } } catch (RecognitionException re) { @@ -2192,25 +2197,25 @@ public final FromOptionsContext fromOptions() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(298); + setState(301); match(OPTIONS); - setState(299); + setState(302); configOption(); - setState(304); + setState(307); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(300); + setState(303); match(COMMA); - setState(301); + setState(304); configOption(); } } } - setState(306); + setState(309); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } @@ -2262,11 +2267,11 @@ public final ConfigOptionContext configOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(310); string(); - setState(308); + setState(311); match(ASSIGN); - setState(309); + setState(312); string(); } } @@ -2313,20 +2318,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 38, RULE_metadata); try { - setState(313); + setState(316); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(311); + setState(314); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(312); + setState(315); deprecated_metadata(); } break; @@ -2348,11 +2353,11 @@ public final MetadataContext metadata() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class MetadataOptionContext extends ParserRuleContext { public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } - public List fromIdentifier() { - return getRuleContexts(FromIdentifierContext.class); + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); } - public FromIdentifierContext fromIdentifier(int i) { - return getRuleContext(FromIdentifierContext.class,i); + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2385,25 +2390,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(315); + setState(318); match(METADATA); - setState(316); - fromIdentifier(); - setState(321); + setState(319); + indexIdentifier(); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(317); + setState(320); match(COMMA); - setState(318); - fromIdentifier(); + setState(321); + indexIdentifier(); } } } - setState(323); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2452,11 +2457,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(327); match(OPENING_BRACKET); - setState(325); + setState(328); metadataOption(); - setState(326); + setState(329); match(CLOSING_BRACKET); } } @@ -2471,6 +2476,112 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class MetricsCommandContext extends ParserRuleContext { + public FieldsContext aggregates; + public FieldsContext grouping; + public TerminalNode METRICS() { return getToken(EsqlBaseParser.METRICS, 0); } + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); + } + public IndexIdentifierContext indexIdentifier(int i) { + return 
getRuleContext(IndexIdentifierContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } + public List fields() { + return getRuleContexts(FieldsContext.class); + } + public FieldsContext fields(int i) { + return getRuleContext(FieldsContext.class,i); + } + @SuppressWarnings("this-escape") + public MetricsCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_metricsCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetricsCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetricsCommand(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetricsCommand(this); + else return visitor.visitChildren(this); + } + } + + public final MetricsCommandContext metricsCommand() throws RecognitionException { + MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_metricsCommand); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(331); + match(METRICS); + setState(332); + indexIdentifier(); + setState(337); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(333); + match(COMMA); + setState(334); + indexIdentifier(); + } + } + } + setState(339); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + } + setState(341); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + case 1: + { + setState(340); + ((MetricsCommandContext)_localctx).aggregates = fields(); + } + break; + } + setState(345); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + case 1: + { + setState(343); + match(BY); + setState(344); + ((MetricsCommandContext)_localctx).grouping = fields(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class EvalCommandContext extends ParserRuleContext { public TerminalNode EVAL() { return getToken(EsqlBaseParser.EVAL, 0); } @@ -2499,13 +2610,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_evalCommand); + enterRule(_localctx, 46, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(347); match(EVAL); - setState(329); + setState(348); fields(); } } @@ -2554,30 +2665,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_statsCommand); + 
enterRule(_localctx, 48, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(331); + setState(350); match(STATS); - setState(333); + setState(352); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(332); + setState(351); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(337); + setState(356); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(335); + setState(354); match(BY); - setState(336); + setState(355); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2629,22 +2740,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_inlinestatsCommand); + enterRule(_localctx, 50, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(339); + setState(358); match(INLINESTATS); - setState(340); + setState(359); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(343); + setState(362); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(341); + setState(360); match(BY); - setState(342); + setState(361); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2696,30 +2807,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_qualifiedName); + enterRule(_localctx, 52, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(345); + setState(364); identifier(); - setState(350); + setState(369); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(346); + setState(365); match(DOT); - setState(347); + setState(366); identifier(); } } } - setState(352); + setState(371); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -2768,30 +2879,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_qualifiedNamePattern); + enterRule(_localctx, 54, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(353); + setState(372); identifierPattern(); - setState(358); + setState(377); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(354); + setState(373); match(DOT); - setState(355); + setState(374); identifierPattern(); } } } - setState(360); + setState(379); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -2832,12 +2943,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_identifier); + enterRule(_localctx, 56, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(361); + setState(380); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2885,11 +2996,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_identifierPattern); + enterRule(_localctx, 58, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(363); + setState(382); match(ID_PATTERN); } } @@ -3155,17 +3266,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_constant); + enterRule(_localctx, 60, RULE_constant); int _la; try { - setState(407); + setState(426); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(365); + setState(384); match(NULL); } break; @@ -3173,9 +3284,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(366); + setState(385); integerValue(); - setState(367); + setState(386); match(UNQUOTED_IDENTIFIER); } break; @@ -3183,7 +3294,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(369); + setState(388); decimalValue(); } break; @@ -3191,7 +3302,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(370); + setState(389); integerValue(); } break; @@ -3199,7 +3310,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(371); + setState(390); booleanValue(); } break; @@ -3207,7 +3318,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(372); + setState(391); match(PARAM); } break; @@ -3215,7 +3326,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(373); + setState(392); string(); } break; @@ -3223,27 +3334,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(374); + setState(393); match(OPENING_BRACKET); - setState(375); + setState(394); numericValue(); - setState(380); + setState(399); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(376); + 
setState(395); match(COMMA); - setState(377); + setState(396); numericValue(); } } - setState(382); + setState(401); _errHandler.sync(this); _la = _input.LA(1); } - setState(383); + setState(402); match(CLOSING_BRACKET); } break; @@ -3251,27 +3362,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(385); + setState(404); match(OPENING_BRACKET); - setState(386); + setState(405); booleanValue(); - setState(391); + setState(410); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(387); + setState(406); match(COMMA); - setState(388); + setState(407); booleanValue(); } } - setState(393); + setState(412); _errHandler.sync(this); _la = _input.LA(1); } - setState(394); + setState(413); match(CLOSING_BRACKET); } break; @@ -3279,27 +3390,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(396); + setState(415); match(OPENING_BRACKET); - setState(397); + setState(416); string(); - setState(402); + setState(421); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(398); + setState(417); match(COMMA); - setState(399); + setState(418); string(); } } - setState(404); + setState(423); _errHandler.sync(this); _la = _input.LA(1); } - setState(405); + setState(424); match(CLOSING_BRACKET); } break; @@ -3342,13 +3453,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_limitCommand); + enterRule(_localctx, 62, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(409); + setState(428); match(LIMIT); - setState(410); + setState(429); match(INTEGER_LITERAL); } } @@ -3398,32 +3509,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_sortCommand); + enterRule(_localctx, 64, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(412); + setState(431); match(SORT); - setState(413); + setState(432); orderExpression(); - setState(418); + setState(437); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(414); + setState(433); match(COMMA); - setState(415); + setState(434); orderExpression(); } } } - setState(420); + setState(439); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3472,19 +3583,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_orderExpression); + enterRule(_localctx, 66, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(421); + setState(440); booleanExpression(0); - setState(423); + setState(442); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( 
getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(422); + setState(441); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3498,14 +3609,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(427); + setState(446); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(425); + setState(444); match(NULLS); - setState(426); + setState(445); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3567,32 +3678,32 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_keepCommand); + enterRule(_localctx, 68, RULE_keepCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(429); + setState(448); match(KEEP); - setState(430); + setState(449); qualifiedNamePattern(); - setState(435); + setState(454); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(431); + setState(450); match(COMMA); - setState(432); + setState(451); qualifiedNamePattern(); } } } - setState(437); + setState(456); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } } } @@ -3642,32 +3753,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_dropCommand); + enterRule(_localctx, 70, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(438); + setState(457); match(DROP); - setState(439); + setState(458); qualifiedNamePattern(); - setState(444); + setState(463); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(440); + setState(459); match(COMMA); - setState(441); + setState(460); qualifiedNamePattern(); } } } - setState(446); + setState(465); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -3717,32 +3828,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_renameCommand); + enterRule(_localctx, 72, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(447); + setState(466); match(RENAME); - setState(448); + setState(467); renameClause(); - setState(453); + setState(472); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(449); + 
setState(468); match(COMMA); - setState(450); + setState(469); renameClause(); } } } - setState(455); + setState(474); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } } } @@ -3790,15 +3901,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_renameClause); + enterRule(_localctx, 74, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(456); + setState(475); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(457); + setState(476); match(AS); - setState(458); + setState(477); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3847,22 +3958,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_dissectCommand); + enterRule(_localctx, 76, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(479); match(DISSECT); - setState(461); + setState(480); primaryExpression(0); - setState(462); + setState(481); string(); - setState(464); + setState(483); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(463); + setState(482); commandOptions(); } break; @@ -3911,15 +4022,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_grokCommand); + enterRule(_localctx, 78, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(466); + setState(485); match(GROK); - setState(467); + setState(486); primaryExpression(0); - setState(468); + setState(487); string(); } } @@ -3962,13 +4073,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_mvExpandCommand); + enterRule(_localctx, 80, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(470); + setState(489); match(MV_EXPAND); - setState(471); + setState(490); qualifiedName(); } } @@ -4017,30 +4128,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_commandOptions); + enterRule(_localctx, 82, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(473); + setState(492); commandOption(); - setState(478); + setState(497); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(474); + setState(493); match(COMMA); - setState(475); + setState(494); commandOption(); } } } - setState(480); + setState(499); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,47,_ctx); } } } @@ -4086,15 +4197,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_commandOption); + enterRule(_localctx, 84, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(481); + setState(500); identifier(); - setState(482); + setState(501); match(ASSIGN); - setState(483); + setState(502); constant(); } } @@ -4135,12 +4246,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_booleanValue); + enterRule(_localctx, 86, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(504); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4193,22 +4304,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_numericValue); + enterRule(_localctx, 88, RULE_numericValue); try { - setState(489); + setState(508); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(487); + setState(506); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(488); + setState(507); integerValue(); } break; @@ -4252,17 +4363,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_decimalValue); + enterRule(_localctx, 90, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(492); + setState(511); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(491); + setState(510); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4275,7 +4386,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(494); + setState(513); match(DECIMAL_LITERAL); } } @@ -4317,17 +4428,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_integerValue); + enterRule(_localctx, 92, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(497); + setState(516); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(496); + setState(515); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4340,7 +4451,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(499); + setState(518); match(INTEGER_LITERAL); } } @@ -4380,11 +4491,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_string); + enterRule(_localctx, 94, RULE_string); try { 
enterOuterAlt(_localctx, 1); { - setState(501); + setState(520); match(QUOTED_STRING); } } @@ -4429,14 +4540,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_comparisonOperator); + enterRule(_localctx, 96, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(503); + setState(522); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 1125899906842624000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 4503599627370496000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4485,13 +4596,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_explainCommand); + enterRule(_localctx, 98, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(505); + setState(524); match(EXPLAIN); - setState(506); + setState(525); subqueryExpression(); } } @@ -4535,15 +4646,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_subqueryExpression); + enterRule(_localctx, 100, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(527); match(OPENING_BRACKET); - setState(509); + setState(528); query(0); - setState(510); + setState(529); match(CLOSING_BRACKET); } } @@ -4595,14 +4706,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_showCommand); + enterRule(_localctx, 102, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(512); + setState(531); match(SHOW); - setState(513); + setState(532); match(INFO); } } @@ -4654,14 +4765,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_metaCommand); + enterRule(_localctx, 104, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(515); + setState(534); match(META); - setState(516); + setState(535); match(FUNCTIONS); } } @@ -4719,53 +4830,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_enrichCommand); + enterRule(_localctx, 106, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(518); + setState(537); match(ENRICH); - setState(519); + setState(538); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(522); + setState(541); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(520); + setState(539); match(ON); - setState(521); + 
setState(540); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(533); + setState(552); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(524); + setState(543); match(WITH); - setState(525); + setState(544); enrichWithClause(); - setState(530); + setState(549); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,52,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(545); match(COMMA); - setState(527); + setState(546); enrichWithClause(); } } } - setState(532); + setState(551); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,52,_ctx); } } break; @@ -4816,23 +4927,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_enrichWithClause); + enterRule(_localctx, 108, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(557); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(535); + setState(554); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(536); + setState(555); match(ASSIGN); } break; } - setState(540); + setState(559); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4894,7 +5005,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static final String _serializedATN = - "\u0004\u0001n\u021f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001u\u0232\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4908,337 +5019,350 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0005\u0001v\b\u0001\n\u0001\f\u0001y\t\u0001\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0080\b\u0002"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0003\u0003\u008f\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0003\u0005\u009b\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0005\u0005\u00a2\b\u0005\n\u0005\f\u0005\u00a5"+ - "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - 
"\u0005\u00ac\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b0\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00b8\b\u0005\n\u0005\f\u0005\u00bb\t\u0005\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00bf\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0003\u0006\u00c6\b\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00cb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0003\u0007\u00d2\b\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0003\b\u00d8\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005"+ - "\b\u00e0\b\b\n\b\f\b\u00e3\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0003\t\u00ed\b\t\u0001\t\u0001\t\u0001\t\u0005"+ - "\t\u00f2\b\t\n\t\f\t\u00f5\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0005\n\u00fd\b\n\n\n\f\n\u0100\t\n\u0003\n\u0102\b\n\u0001\n"+ - "\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ - "\r\u0001\r\u0005\r\u010e\b\r\n\r\f\r\u0111\t\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0118\b\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u011e\b\u000f\n\u000f"+ - "\f\u000f\u0121\t\u000f\u0001\u000f\u0003\u000f\u0124\b\u000f\u0001\u000f"+ - "\u0003\u000f\u0127\b\u000f\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0005\u0011\u012f\b\u0011\n\u0011\f\u0011\u0132"+ - "\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0003\u0013\u013a\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0005\u0014\u0140\b\u0014\n\u0014\f\u0014\u0143\t\u0014\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0003\u0017\u014e\b\u0017\u0001\u0017\u0001\u0017"+ - "\u0003\u0017\u0152\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0003\u0018\u0158\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ - "\u015d\b\u0019\n\u0019\f\u0019\u0160\t\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0005\u001a\u0165\b\u001a\n\u001a\f\u001a\u0168\t\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u017b\b\u001d"+ - "\n\u001d\f\u001d\u017e\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u0186\b\u001d\n\u001d\f\u001d"+ - "\u0189\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u0191\b\u001d\n\u001d\f\u001d\u0194\t\u001d\u0001"+ - "\u001d\u0001\u001d\u0003\u001d\u0198\b\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a1"+ - "\b\u001f\n\u001f\f\u001f\u01a4\t\u001f\u0001 \u0001 \u0003 \u01a8\b \u0001"+ - " \u0001 \u0003 \u01ac\b \u0001!\u0001!\u0001!\u0001!\u0005!\u01b2\b!\n"+ - "!\f!\u01b5\t!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f"+ - "\"\u01be\t\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01c4\b#\n#\f#\u01c7\t"+ - "#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0003%\u01d1"+ - "\b%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001("+ - "\u0001(\u0005(\u01dd\b(\n(\f(\u01e0\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001+\u0001+\u0003+\u01ea\b+\u0001,\u0003,\u01ed\b,\u0001,\u0001"+ - 
",\u0001-\u0003-\u01f2\b-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001"+ - "0\u00010\u00010\u00011\u00011\u00011\u00011\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00014\u00014\u00014\u00014\u00034\u020b\b4\u00014\u0001"+ - "4\u00014\u00014\u00054\u0211\b4\n4\f4\u0214\t4\u00034\u0216\b4\u00015"+ - "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ - "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ - "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ - "j\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000DDJJ\u0001\u0000CD\u0002"+ - "\u0000 $$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000"+ - "l\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001"+ - "\u0000\u0000\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000"+ - "\u0000\u0000\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000"+ - "\u0000\u000e\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000"+ - "\u0000\u0012\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000"+ - "\u0000\u0016\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000"+ - "\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000"+ - "\u0000\u001e\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000"+ - "\"\u012a\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139"+ - "\u0001\u0000\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000"+ - "\u0000\u0000,\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u0000"+ - "0\u0153\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161"+ - "\u0001\u0000\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000"+ - "\u0000\u0000:\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000"+ - ">\u019c\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad"+ - "\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000"+ - "\u0000\u0000H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000"+ - "L\u01d2\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9"+ - "\u0001\u0000\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000"+ - "\u0000\u0000V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000"+ - "Z\u01f1\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7"+ - "\u0001\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000"+ - "\u0000\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000"+ - "h\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ - "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ - "\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ - "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ - "\u0006\u0003\u0000ur\u0001\u0000\u0000\u0000vy\u0001\u0000\u0000\u0000"+ - "wu\u0001\u0000\u0000\u0000wx\u0001\u0000\u0000\u0000x\u0003\u0001\u0000"+ - "\u0000\u0000yw\u0001\u0000\u0000\u0000z\u0080\u0003`0\u0000{\u0080\u0003"+ - "\u001e\u000f\u0000|\u0080\u0003\u0018\f\u0000}\u0080\u0003d2\u0000~\u0080"+ - "\u0003f3\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+ - "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+ - "~\u0001\u0000\u0000\u0000\u0080\u0005\u0001\u0000\u0000\u0000\u0081\u008f"+ - "\u0003,\u0016\u0000\u0082\u008f\u00030\u0018\u0000\u0083\u008f\u0003<"+ - "\u001e\u0000\u0084\u008f\u0003B!\u0000\u0085\u008f\u0003>\u001f\u0000"+ - 
"\u0086\u008f\u0003.\u0017\u0000\u0087\u008f\u0003\b\u0004\u0000\u0088"+ - "\u008f\u0003D\"\u0000\u0089\u008f\u0003F#\u0000\u008a\u008f\u0003J%\u0000"+ - "\u008b\u008f\u0003L&\u0000\u008c\u008f\u0003h4\u0000\u008d\u008f\u0003"+ - "N\'\u0000\u008e\u0081\u0001\u0000\u0000\u0000\u008e\u0082\u0001\u0000"+ - "\u0000\u0000\u008e\u0083\u0001\u0000\u0000\u0000\u008e\u0084\u0001\u0000"+ - "\u0000\u0000\u008e\u0085\u0001\u0000\u0000\u0000\u008e\u0086\u0001\u0000"+ - "\u0000\u0000\u008e\u0087\u0001\u0000\u0000\u0000\u008e\u0088\u0001\u0000"+ - "\u0000\u0000\u008e\u0089\u0001\u0000\u0000\u0000\u008e\u008a\u0001\u0000"+ - "\u0000\u0000\u008e\u008b\u0001\u0000\u0000\u0000\u008e\u008c\u0001\u0000"+ - "\u0000\u0000\u008e\u008d\u0001\u0000\u0000\u0000\u008f\u0007\u0001\u0000"+ - "\u0000\u0000\u0090\u0091\u0005\u0012\u0000\u0000\u0091\u0092\u0003\n\u0005"+ - "\u0000\u0092\t\u0001\u0000\u0000\u0000\u0093\u0094\u0006\u0005\uffff\uffff"+ - "\u0000\u0094\u0095\u0005-\u0000\u0000\u0095\u00b0\u0003\n\u0005\u0007"+ - "\u0096\u00b0\u0003\u000e\u0007\u0000\u0097\u00b0\u0003\f\u0006\u0000\u0098"+ - "\u009a\u0003\u000e\u0007\u0000\u0099\u009b\u0005-\u0000\u0000\u009a\u0099"+ - "\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009c"+ - "\u0001\u0000\u0000\u0000\u009c\u009d\u0005*\u0000\u0000\u009d\u009e\u0005"+ - ")\u0000\u0000\u009e\u00a3\u0003\u000e\u0007\u0000\u009f\u00a0\u0005#\u0000"+ - "\u0000\u00a0\u00a2\u0003\u000e\u0007\u0000\u00a1\u009f\u0001\u0000\u0000"+ - "\u0000\u00a2\u00a5\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000"+ - "\u0000\u00a3\u00a4\u0001\u0000\u0000\u0000\u00a4\u00a6\u0001\u0000\u0000"+ - "\u0000\u00a5\u00a3\u0001\u0000\u0000\u0000\u00a6\u00a7\u00053\u0000\u0000"+ - "\u00a7\u00b0\u0001\u0000\u0000\u0000\u00a8\u00a9\u0003\u000e\u0007\u0000"+ - "\u00a9\u00ab\u0005+\u0000\u0000\u00aa\u00ac\u0005-\u0000\u0000\u00ab\u00aa"+ - "\u0001\u0000\u0000\u0000\u00ab\u00ac\u0001\u0000\u0000\u0000\u00ac\u00ad"+ - "\u0001\u0000\u0000\u0000\u00ad\u00ae\u0005.\u0000\u0000\u00ae\u00b0\u0001"+ - "\u0000\u0000\u0000\u00af\u0093\u0001\u0000\u0000\u0000\u00af\u0096\u0001"+ - "\u0000\u0000\u0000\u00af\u0097\u0001\u0000\u0000\u0000\u00af\u0098\u0001"+ - "\u0000\u0000\u0000\u00af\u00a8\u0001\u0000\u0000\u0000\u00b0\u00b9\u0001"+ - "\u0000\u0000\u0000\u00b1\u00b2\n\u0004\u0000\u0000\u00b2\u00b3\u0005\u001f"+ - "\u0000\u0000\u00b3\u00b8\u0003\n\u0005\u0005\u00b4\u00b5\n\u0003\u0000"+ - "\u0000\u00b5\u00b6\u00050\u0000\u0000\u00b6\u00b8\u0003\n\u0005\u0004"+ - "\u00b7\u00b1\u0001\u0000\u0000\u0000\u00b7\u00b4\u0001\u0000\u0000\u0000"+ - "\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ - "\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u000b\u0001\u0000\u0000\u0000"+ - "\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00be\u0003\u000e\u0007\u0000"+ - "\u00bd\u00bf\u0005-\u0000\u0000\u00be\u00bd\u0001\u0000\u0000\u0000\u00be"+ - "\u00bf\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0"+ - "\u00c1\u0005,\u0000\u0000\u00c1\u00c2\u0003\\.\u0000\u00c2\u00cb\u0001"+ - "\u0000\u0000\u0000\u00c3\u00c5\u0003\u000e\u0007\u0000\u00c4\u00c6\u0005"+ - "-\u0000\u0000\u00c5\u00c4\u0001\u0000\u0000\u0000\u00c5\u00c6\u0001\u0000"+ - "\u0000\u0000\u00c6\u00c7\u0001\u0000\u0000\u0000\u00c7\u00c8\u00052\u0000"+ - "\u0000\u00c8\u00c9\u0003\\.\u0000\u00c9\u00cb\u0001\u0000\u0000\u0000"+ - "\u00ca\u00bc\u0001\u0000\u0000\u0000\u00ca\u00c3\u0001\u0000\u0000\u0000"+ - "\u00cb\r\u0001\u0000\u0000\u0000\u00cc\u00d2\u0003\u0010\b\u0000\u00cd"+ - 
"\u00ce\u0003\u0010\b\u0000\u00ce\u00cf\u0003^/\u0000\u00cf\u00d0\u0003"+ - "\u0010\b\u0000\u00d0\u00d2\u0001\u0000\u0000\u0000\u00d1\u00cc\u0001\u0000"+ - "\u0000\u0000\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d2\u000f\u0001\u0000"+ - "\u0000\u0000\u00d3\u00d4\u0006\b\uffff\uffff\u0000\u00d4\u00d8\u0003\u0012"+ - "\t\u0000\u00d5\u00d6\u0007\u0000\u0000\u0000\u00d6\u00d8\u0003\u0010\b"+ - "\u0003\u00d7\u00d3\u0001\u0000\u0000\u0000\u00d7\u00d5\u0001\u0000\u0000"+ - "\u0000\u00d8\u00e1\u0001\u0000\u0000\u0000\u00d9\u00da\n\u0002\u0000\u0000"+ - "\u00da\u00db\u0007\u0001\u0000\u0000\u00db\u00e0\u0003\u0010\b\u0003\u00dc"+ - "\u00dd\n\u0001\u0000\u0000\u00dd\u00de\u0007\u0000\u0000\u0000\u00de\u00e0"+ - "\u0003\u0010\b\u0002\u00df\u00d9\u0001\u0000\u0000\u0000\u00df\u00dc\u0001"+ - "\u0000\u0000\u0000\u00e0\u00e3\u0001\u0000\u0000\u0000\u00e1\u00df\u0001"+ - "\u0000\u0000\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u0011\u0001"+ - "\u0000\u0000\u0000\u00e3\u00e1\u0001\u0000\u0000\u0000\u00e4\u00e5\u0006"+ - "\t\uffff\uffff\u0000\u00e5\u00ed\u0003:\u001d\u0000\u00e6\u00ed\u0003"+ - "2\u0019\u0000\u00e7\u00ed\u0003\u0014\n\u0000\u00e8\u00e9\u0005)\u0000"+ - "\u0000\u00e9\u00ea\u0003\n\u0005\u0000\u00ea\u00eb\u00053\u0000\u0000"+ - "\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec\u00e4\u0001\u0000\u0000\u0000"+ - "\u00ec\u00e6\u0001\u0000\u0000\u0000\u00ec\u00e7\u0001\u0000\u0000\u0000"+ - "\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ed\u00f3\u0001\u0000\u0000\u0000"+ - "\u00ee\u00ef\n\u0001\u0000\u0000\u00ef\u00f0\u0005\"\u0000\u0000\u00f0"+ - "\u00f2\u0003\u0016\u000b\u0000\u00f1\u00ee\u0001\u0000\u0000\u0000\u00f2"+ - "\u00f5\u0001\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f4\u0001\u0000\u0000\u0000\u00f4\u0013\u0001\u0000\u0000\u0000\u00f5"+ - "\u00f3\u0001\u0000\u0000\u0000\u00f6\u00f7\u00036\u001b\u0000\u00f7\u0101"+ - "\u0005)\u0000\u0000\u00f8\u0102\u0005>\u0000\u0000\u00f9\u00fe\u0003\n"+ - "\u0005\u0000\u00fa\u00fb\u0005#\u0000\u0000\u00fb\u00fd\u0003\n\u0005"+ - "\u0000\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000"+ - "\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000"+ - "\u0000\u00ff\u0102\u0001\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000"+ - "\u0000\u0101\u00f8\u0001\u0000\u0000\u0000\u0101\u00f9\u0001\u0000\u0000"+ - "\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000"+ - "\u0000\u0103\u0104\u00053\u0000\u0000\u0104\u0015\u0001\u0000\u0000\u0000"+ - "\u0105\u0106\u00036\u001b\u0000\u0106\u0017\u0001\u0000\u0000\u0000\u0107"+ - "\u0108\u0005\u000e\u0000\u0000\u0108\u0109\u0003\u001a\r\u0000\u0109\u0019"+ - "\u0001\u0000\u0000\u0000\u010a\u010f\u0003\u001c\u000e\u0000\u010b\u010c"+ - "\u0005#\u0000\u0000\u010c\u010e\u0003\u001c\u000e\u0000\u010d\u010b\u0001"+ - "\u0000\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000\u010f\u010d\u0001"+ - "\u0000\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001b\u0001"+ - "\u0000\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0118\u0003"+ - "\n\u0005\u0000\u0113\u0114\u00032\u0019\u0000\u0114\u0115\u0005!\u0000"+ - "\u0000\u0115\u0116\u0003\n\u0005\u0000\u0116\u0118\u0001\u0000\u0000\u0000"+ - "\u0117\u0112\u0001\u0000\u0000\u0000\u0117\u0113\u0001\u0000\u0000\u0000"+ - "\u0118\u001d\u0001\u0000\u0000\u0000\u0119\u011a\u0005\u0006\u0000\u0000"+ - "\u011a\u011f\u0003 \u0010\u0000\u011b\u011c\u0005#\u0000\u0000\u011c\u011e"+ - "\u0003 \u0010\u0000\u011d\u011b\u0001\u0000\u0000\u0000\u011e\u0121\u0001"+ - 
"\u0000\u0000\u0000\u011f\u011d\u0001\u0000\u0000\u0000\u011f\u0120\u0001"+ - "\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121\u011f\u0001"+ - "\u0000\u0000\u0000\u0122\u0124\u0003&\u0013\u0000\u0123\u0122\u0001\u0000"+ - "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ - "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ - "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ - "\u0000\u0128\u0129\u0007\u0002\u0000\u0000\u0129!\u0001\u0000\u0000\u0000"+ - "\u012a\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d"+ - "\u0005#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000"+ - "\u0000\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000"+ - "\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000"+ - "\u0134\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001"+ - "\u0000\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015"+ - "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000"+ - "\u0000\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000"+ - "\u013c\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140"+ - "\u0003 \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ - "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ - "\u0000\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ - "\u0000\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000"+ - "\u0146\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0005\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001"+ - "\u0000\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003"+ - "\u001a\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ - "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e"+ - "\u0000\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000"+ - "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000"+ - "\u0153\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155"+ - "\u0156\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155"+ - "\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001"+ - "\u0000\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000"+ - "\u0000\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ - "\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160"+ - "\u015e\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163"+ - "\u0005%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000"+ - "\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000"+ - "\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0003\u0000"+ - "\u0000\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c"+ - "9\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ - "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ - "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ - "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ - 
"\u0176\u0177\u0005A\u0000\u0000\u0177\u017c\u0003V+\u0000\u0178\u0179"+ - "\u0005#\u0000\u0000\u0179\u017b\u0003V+\u0000\u017a\u0178\u0001\u0000"+ - "\u0000\u0000\u017b\u017e\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000"+ - "\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u017f\u0001\u0000"+ - "\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017f\u0180\u0005B\u0000"+ - "\u0000\u0180\u0198\u0001\u0000\u0000\u0000\u0181\u0182\u0005A\u0000\u0000"+ - "\u0182\u0187\u0003T*\u0000\u0183\u0184\u0005#\u0000\u0000\u0184\u0186"+ - "\u0003T*\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186\u0189\u0001\u0000"+ - "\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187\u0188\u0001\u0000"+ - "\u0000\u0000\u0188\u018a\u0001\u0000\u0000\u0000\u0189\u0187\u0001\u0000"+ - "\u0000\u0000\u018a\u018b\u0005B\u0000\u0000\u018b\u0198\u0001\u0000\u0000"+ - "\u0000\u018c\u018d\u0005A\u0000\u0000\u018d\u0192\u0003\\.\u0000\u018e"+ - "\u018f\u0005#\u0000\u0000\u018f\u0191\u0003\\.\u0000\u0190\u018e\u0001"+ - "\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001"+ - "\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193\u0195\u0001"+ - "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005"+ - "B\u0000\u0000\u0196\u0198\u0001\u0000\u0000\u0000\u0197\u016d\u0001\u0000"+ - "\u0000\u0000\u0197\u016e\u0001\u0000\u0000\u0000\u0197\u0171\u0001\u0000"+ - "\u0000\u0000\u0197\u0172\u0001\u0000\u0000\u0000\u0197\u0173\u0001\u0000"+ - "\u0000\u0000\u0197\u0174\u0001\u0000\u0000\u0000\u0197\u0175\u0001\u0000"+ - "\u0000\u0000\u0197\u0176\u0001\u0000\u0000\u0000\u0197\u0181\u0001\u0000"+ - "\u0000\u0000\u0197\u018c\u0001\u0000\u0000\u0000\u0198;\u0001\u0000\u0000"+ - "\u0000\u0199\u019a\u0005\n\u0000\u0000\u019a\u019b\u0005\u001c\u0000\u0000"+ - "\u019b=\u0001\u0000\u0000\u0000\u019c\u019d\u0005\u0010\u0000\u0000\u019d"+ - "\u01a2\u0003@ \u0000\u019e\u019f\u0005#\u0000\u0000\u019f\u01a1\u0003"+ - "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ - "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0004\u0000\u0000\u01a7"+ - "\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ - "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ - "\u0007\u0005\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ - "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ - "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ - "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ - "\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000"+ - "\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4C\u0001\u0000\u0000\u0000\u01b5"+ - "\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u0002\u0000\u0000\u01b7"+ - "\u01bc\u00034\u001a\u0000\u01b8\u01b9\u0005#\u0000\u0000\u01b9\u01bb\u0003"+ - "4\u001a\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001\u0000"+ - "\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001\u0000"+ - "\u0000\u0000\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000"+ - "\u0000\u01bf\u01c0\u0005\r\u0000\u0000\u01c0\u01c5\u0003H$\u0000\u01c1"+ - "\u01c2\u0005#\u0000\u0000\u01c2\u01c4\u0003H$\u0000\u01c3\u01c1\u0001"+ - "\u0000\u0000\u0000\u01c4\u01c7\u0001\u0000\u0000\u0000\u01c5\u01c3\u0001"+ - 
"\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6G\u0001\u0000"+ - "\u0000\u0000\u01c7\u01c5\u0001\u0000\u0000\u0000\u01c8\u01c9\u00034\u001a"+ - "\u0000\u01c9\u01ca\u0005R\u0000\u0000\u01ca\u01cb\u00034\u001a\u0000\u01cb"+ - "I\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005\u0001\u0000\u0000\u01cd\u01ce"+ - "\u0003\u0012\t\u0000\u01ce\u01d0\u0003\\.\u0000\u01cf\u01d1\u0003P(\u0000"+ - "\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000"+ - "\u01d1K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u0007\u0000\u0000\u01d3"+ - "\u01d4\u0003\u0012\t\u0000\u01d4\u01d5\u0003\\.\u0000\u01d5M\u0001\u0000"+ - "\u0000\u0000\u01d6\u01d7\u0005\f\u0000\u0000\u01d7\u01d8\u00032\u0019"+ - "\u0000\u01d8O\u0001\u0000\u0000\u0000\u01d9\u01de\u0003R)\u0000\u01da"+ - "\u01db\u0005#\u0000\u0000\u01db\u01dd\u0003R)\u0000\u01dc\u01da\u0001"+ - "\u0000\u0000\u0000\u01dd\u01e0\u0001\u0000\u0000\u0000\u01de\u01dc\u0001"+ - "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ - "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ - "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ - "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0006\u0000\u0000\u01e6U\u0001"+ - "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ - "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ - "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ - "\u01eb\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed"+ - "\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u001d\u0000\u0000\u01ef"+ - "Y\u0001\u0000\u0000\u0000\u01f0\u01f2\u0007\u0000\u0000\u0000\u01f1\u01f0"+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ - "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ - "\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8_\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ - "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ - "\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ - "\u0000\u0200\u0201\u0005\u000f\u0000\u0000\u0201\u0202\u0005b\u0000\u0000"+ - "\u0202e\u0001\u0000\u0000\u0000\u0203\u0204\u0005\u000b\u0000\u0000\u0204"+ - "\u0205\u0005f\u0000\u0000\u0205g\u0001\u0000\u0000\u0000\u0206\u0207\u0005"+ - "\u0003\u0000\u0000\u0207\u020a\u0005X\u0000\u0000\u0208\u0209\u0005V\u0000"+ - "\u0000\u0209\u020b\u00034\u001a\u0000\u020a\u0208\u0001\u0000\u0000\u0000"+ - "\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u0215\u0001\u0000\u0000\u0000"+ - "\u020c\u020d\u0005W\u0000\u0000\u020d\u0212\u0003j5\u0000\u020e\u020f"+ - "\u0005#\u0000\u0000\u020f\u0211\u0003j5\u0000\u0210\u020e\u0001\u0000"+ - "\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213\u0216\u0001\u0000"+ - "\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u020c\u0001\u0000"+ - "\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216i\u0001\u0000\u0000"+ - "\u0000\u0217\u0218\u00034\u001a\u0000\u0218\u0219\u0005!\u0000\u0000\u0219"+ - "\u021b\u0001\u0000\u0000\u0000\u021a\u0217\u0001\u0000\u0000\u0000\u021a"+ - "\u021b\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u00034\u001a\u0000\u021dk\u0001\u0000\u0000\u00004w\u007f\u008e"+ - 
"\u009a\u00a3\u00ab\u00af\u00b7\u00b9\u00be\u00c5\u00ca\u00d1\u00d7\u00df"+ - "\u00e1\u00ec\u00f3\u00fe\u0101\u010f\u0117\u011f\u0123\u0126\u0130\u0139"+ - "\u0141\u014d\u0151\u0157\u015e\u0166\u017c\u0187\u0192\u0197\u01a2\u01a7"+ - "\u01ab\u01b3\u01bc\u01c5\u01d0\u01de\u01e9\u01ec\u01f1\u020a\u0212\u0215"+ - "\u021a"; + "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0005\u0001x\b\u0001\n\u0001\f\u0001{\t"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002\u0083\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0092\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u009e\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00a5"+ + "\b\u0005\n\u0005\f\u0005\u00a8\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u00b3\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00bb\b\u0005\n\u0005\f\u0005\u00be"+ + "\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00c2\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c9\b\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00ce\b\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00d5\b\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00db\b\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0005\b\u00e3\b\b\n\b\f\b\u00e6\t\b\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00f0"+ + "\b\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f5\b\t\n\t\f\t\u00f8\t\t\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u0100\b\n\n\n\f\n\u0103"+ + "\t\n\u0003\n\u0105\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0005\r\u0111\b\r\n\r\f\r\u0114"+ + "\t\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0003"+ + "\u000e\u011b\b\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ + "\u000f\u0121\b\u000f\n\u000f\f\u000f\u0124\t\u000f\u0001\u000f\u0003\u000f"+ + "\u0127\b\u000f\u0001\u000f\u0003\u000f\u012a\b\u000f\u0001\u0010\u0001"+ + "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0132"+ + "\b\u0011\n\u0011\f\u0011\u0135\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0003\u0013\u013d\b\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u0143\b\u0014\n\u0014"+ + "\f\u0014\u0146\t\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0150\b\u0016"+ + "\n\u0016\f\u0016\u0153\t\u0016\u0001\u0016\u0003\u0016\u0156\b\u0016\u0001"+ + "\u0016\u0001\u0016\u0003\u0016\u015a\b\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0018\u0001\u0018\u0003\u0018\u0161\b\u0018\u0001\u0018\u0001"+ + "\u0018\u0003\u0018\u0165\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0003\u0019\u016b\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0005"+ + "\u001a\u0170\b\u001a\n\u001a\f\u001a\u0173\t\u001a\u0001\u001b\u0001\u001b"+ + 
"\u0001\u001b\u0005\u001b\u0178\b\u001b\n\u001b\f\u001b\u017b\t\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u018e"+ + "\b\u001e\n\u001e\f\u001e\u0191\t\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0199\b\u001e\n\u001e"+ + "\f\u001e\u019c\t\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0005\u001e\u01a4\b\u001e\n\u001e\f\u001e\u01a7"+ + "\t\u001e\u0001\u001e\u0001\u001e\u0003\u001e\u01ab\b\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0001 \u0005 \u01b4\b \n"+ + " \f \u01b7\t \u0001!\u0001!\u0003!\u01bb\b!\u0001!\u0001!\u0003!\u01bf"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c5\b\"\n\"\f\"\u01c8\t"+ + "\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01ce\b#\n#\f#\u01d1\t#\u0001$\u0001"+ + "$\u0001$\u0001$\u0005$\u01d7\b$\n$\f$\u01da\t$\u0001%\u0001%\u0001%\u0001"+ + "%\u0001&\u0001&\u0001&\u0001&\u0003&\u01e4\b&\u0001\'\u0001\'\u0001\'"+ + "\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0005)\u01f0\b)\n)"+ + "\f)\u01f3\t)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0003"+ + ",\u01fd\b,\u0001-\u0003-\u0200\b-\u0001-\u0001-\u0001.\u0003.\u0205\b"+ + ".\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u00011\u00011\u0001"+ + "2\u00012\u00012\u00012\u00013\u00013\u00013\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00035\u021e\b5\u00015\u00015\u00015\u00015\u0005"+ + "5\u0224\b5\n5\f5\u0227\t5\u00035\u0229\b5\u00016\u00016\u00016\u00036"+ + "\u022e\b6\u00016\u00016\u00016\u0000\u0004\u0002\n\u0010\u00127\u0000"+ + "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ + "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjl\u0000\u0007\u0001\u0000"+ + ">?\u0001\u0000@B\u0001\u0000EF\u0002\u0000\"\"&&\u0001\u0000)*\u0002\u0000"+ + "((66\u0002\u0000779=\u024e\u0000n\u0001\u0000\u0000\u0000\u0002q\u0001"+ + "\u0000\u0000\u0000\u0004\u0082\u0001\u0000\u0000\u0000\u0006\u0091\u0001"+ + "\u0000\u0000\u0000\b\u0093\u0001\u0000\u0000\u0000\n\u00b2\u0001\u0000"+ + "\u0000\u0000\f\u00cd\u0001\u0000\u0000\u0000\u000e\u00d4\u0001\u0000\u0000"+ + "\u0000\u0010\u00da\u0001\u0000\u0000\u0000\u0012\u00ef\u0001\u0000\u0000"+ + "\u0000\u0014\u00f9\u0001\u0000\u0000\u0000\u0016\u0108\u0001\u0000\u0000"+ + "\u0000\u0018\u010a\u0001\u0000\u0000\u0000\u001a\u010d\u0001\u0000\u0000"+ + "\u0000\u001c\u011a\u0001\u0000\u0000\u0000\u001e\u011c\u0001\u0000\u0000"+ + "\u0000 \u012b\u0001\u0000\u0000\u0000\"\u012d\u0001\u0000\u0000\u0000"+ + "$\u0136\u0001\u0000\u0000\u0000&\u013c\u0001\u0000\u0000\u0000(\u013e"+ + "\u0001\u0000\u0000\u0000*\u0147\u0001\u0000\u0000\u0000,\u014b\u0001\u0000"+ + "\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015e\u0001\u0000\u0000\u0000"+ + "2\u0166\u0001\u0000\u0000\u00004\u016c\u0001\u0000\u0000\u00006\u0174"+ + "\u0001\u0000\u0000\u00008\u017c\u0001\u0000\u0000\u0000:\u017e\u0001\u0000"+ + "\u0000\u0000<\u01aa\u0001\u0000\u0000\u0000>\u01ac\u0001\u0000\u0000\u0000"+ + "@\u01af\u0001\u0000\u0000\u0000B\u01b8\u0001\u0000\u0000\u0000D\u01c0"+ + "\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01d2\u0001\u0000"+ + "\u0000\u0000J\u01db\u0001\u0000\u0000\u0000L\u01df\u0001\u0000\u0000\u0000"+ + "N\u01e5\u0001\u0000\u0000\u0000P\u01e9\u0001\u0000\u0000\u0000R\u01ec"+ + 
"\u0001\u0000\u0000\u0000T\u01f4\u0001\u0000\u0000\u0000V\u01f8\u0001\u0000"+ + "\u0000\u0000X\u01fc\u0001\u0000\u0000\u0000Z\u01ff\u0001\u0000\u0000\u0000"+ + "\\\u0204\u0001\u0000\u0000\u0000^\u0208\u0001\u0000\u0000\u0000`\u020a"+ + "\u0001\u0000\u0000\u0000b\u020c\u0001\u0000\u0000\u0000d\u020f\u0001\u0000"+ + "\u0000\u0000f\u0213\u0001\u0000\u0000\u0000h\u0216\u0001\u0000\u0000\u0000"+ + "j\u0219\u0001\u0000\u0000\u0000l\u022d\u0001\u0000\u0000\u0000no\u0003"+ + "\u0002\u0001\u0000op\u0005\u0000\u0000\u0001p\u0001\u0001\u0000\u0000"+ + "\u0000qr\u0006\u0001\uffff\uffff\u0000rs\u0003\u0004\u0002\u0000sy\u0001"+ + "\u0000\u0000\u0000tu\n\u0001\u0000\u0000uv\u0005\u001c\u0000\u0000vx\u0003"+ + "\u0006\u0003\u0000wt\u0001\u0000\u0000\u0000x{\u0001\u0000\u0000\u0000"+ + "yw\u0001\u0000\u0000\u0000yz\u0001\u0000\u0000\u0000z\u0003\u0001\u0000"+ + "\u0000\u0000{y\u0001\u0000\u0000\u0000|\u0083\u0003b1\u0000}\u0083\u0003"+ + "\u001e\u000f\u0000~\u0083\u0003\u0018\f\u0000\u007f\u0083\u0003,\u0016"+ + "\u0000\u0080\u0083\u0003f3\u0000\u0081\u0083\u0003h4\u0000\u0082|\u0001"+ + "\u0000\u0000\u0000\u0082}\u0001\u0000\u0000\u0000\u0082~\u0001\u0000\u0000"+ + "\u0000\u0082\u007f\u0001\u0000\u0000\u0000\u0082\u0080\u0001\u0000\u0000"+ + "\u0000\u0082\u0081\u0001\u0000\u0000\u0000\u0083\u0005\u0001\u0000\u0000"+ + "\u0000\u0084\u0092\u0003.\u0017\u0000\u0085\u0092\u00032\u0019\u0000\u0086"+ + "\u0092\u0003>\u001f\u0000\u0087\u0092\u0003D\"\u0000\u0088\u0092\u0003"+ + "@ \u0000\u0089\u0092\u00030\u0018\u0000\u008a\u0092\u0003\b\u0004\u0000"+ + "\u008b\u0092\u0003F#\u0000\u008c\u0092\u0003H$\u0000\u008d\u0092\u0003"+ + "L&\u0000\u008e\u0092\u0003N\'\u0000\u008f\u0092\u0003j5\u0000\u0090\u0092"+ + "\u0003P(\u0000\u0091\u0084\u0001\u0000\u0000\u0000\u0091\u0085\u0001\u0000"+ + "\u0000\u0000\u0091\u0086\u0001\u0000\u0000\u0000\u0091\u0087\u0001\u0000"+ + "\u0000\u0000\u0091\u0088\u0001\u0000\u0000\u0000\u0091\u0089\u0001\u0000"+ + "\u0000\u0000\u0091\u008a\u0001\u0000\u0000\u0000\u0091\u008b\u0001\u0000"+ + "\u0000\u0000\u0091\u008c\u0001\u0000\u0000\u0000\u0091\u008d\u0001\u0000"+ + "\u0000\u0000\u0091\u008e\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000"+ + "\u0000\u0000\u0091\u0090\u0001\u0000\u0000\u0000\u0092\u0007\u0001\u0000"+ + "\u0000\u0000\u0093\u0094\u0005\u0013\u0000\u0000\u0094\u0095\u0003\n\u0005"+ + "\u0000\u0095\t\u0001\u0000\u0000\u0000\u0096\u0097\u0006\u0005\uffff\uffff"+ + "\u0000\u0097\u0098\u0005/\u0000\u0000\u0098\u00b3\u0003\n\u0005\u0007"+ + "\u0099\u00b3\u0003\u000e\u0007\u0000\u009a\u00b3\u0003\f\u0006\u0000\u009b"+ + "\u009d\u0003\u000e\u0007\u0000\u009c\u009e\u0005/\u0000\u0000\u009d\u009c"+ + "\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000\u0000\u009e\u009f"+ + "\u0001\u0000\u0000\u0000\u009f\u00a0\u0005,\u0000\u0000\u00a0\u00a1\u0005"+ + "+\u0000\u0000\u00a1\u00a6\u0003\u000e\u0007\u0000\u00a2\u00a3\u0005%\u0000"+ + "\u0000\u00a3\u00a5\u0003\u000e\u0007\u0000\u00a4\u00a2\u0001\u0000\u0000"+ + "\u0000\u00a5\u00a8\u0001\u0000\u0000\u0000\u00a6\u00a4\u0001\u0000\u0000"+ + "\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a9\u0001\u0000\u0000"+ + "\u0000\u00a8\u00a6\u0001\u0000\u0000\u0000\u00a9\u00aa\u00055\u0000\u0000"+ + "\u00aa\u00b3\u0001\u0000\u0000\u0000\u00ab\u00ac\u0003\u000e\u0007\u0000"+ + "\u00ac\u00ae\u0005-\u0000\u0000\u00ad\u00af\u0005/\u0000\u0000\u00ae\u00ad"+ + "\u0001\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0"+ + "\u0001\u0000\u0000\u0000\u00b0\u00b1\u00050\u0000\u0000\u00b1\u00b3\u0001"+ + 
"\u0000\u0000\u0000\u00b2\u0096\u0001\u0000\u0000\u0000\u00b2\u0099\u0001"+ + "\u0000\u0000\u0000\u00b2\u009a\u0001\u0000\u0000\u0000\u00b2\u009b\u0001"+ + "\u0000\u0000\u0000\u00b2\u00ab\u0001\u0000\u0000\u0000\u00b3\u00bc\u0001"+ + "\u0000\u0000\u0000\u00b4\u00b5\n\u0004\u0000\u0000\u00b5\u00b6\u0005!"+ + "\u0000\u0000\u00b6\u00bb\u0003\n\u0005\u0005\u00b7\u00b8\n\u0003\u0000"+ + "\u0000\u00b8\u00b9\u00052\u0000\u0000\u00b9\u00bb\u0003\n\u0005\u0004"+ + "\u00ba\u00b4\u0001\u0000\u0000\u0000\u00ba\u00b7\u0001\u0000\u0000\u0000"+ + "\u00bb\u00be\u0001\u0000\u0000\u0000\u00bc\u00ba\u0001\u0000\u0000\u0000"+ + "\u00bc\u00bd\u0001\u0000\u0000\u0000\u00bd\u000b\u0001\u0000\u0000\u0000"+ + "\u00be\u00bc\u0001\u0000\u0000\u0000\u00bf\u00c1\u0003\u000e\u0007\u0000"+ + "\u00c0\u00c2\u0005/\u0000\u0000\u00c1\u00c0\u0001\u0000\u0000\u0000\u00c1"+ + "\u00c2\u0001\u0000\u0000\u0000\u00c2\u00c3\u0001\u0000\u0000\u0000\u00c3"+ + "\u00c4\u0005.\u0000\u0000\u00c4\u00c5\u0003^/\u0000\u00c5\u00ce\u0001"+ + "\u0000\u0000\u0000\u00c6\u00c8\u0003\u000e\u0007\u0000\u00c7\u00c9\u0005"+ + "/\u0000\u0000\u00c8\u00c7\u0001\u0000\u0000\u0000\u00c8\u00c9\u0001\u0000"+ + "\u0000\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u00054\u0000"+ + "\u0000\u00cb\u00cc\u0003^/\u0000\u00cc\u00ce\u0001\u0000\u0000\u0000\u00cd"+ + "\u00bf\u0001\u0000\u0000\u0000\u00cd\u00c6\u0001\u0000\u0000\u0000\u00ce"+ + "\r\u0001\u0000\u0000\u0000\u00cf\u00d5\u0003\u0010\b\u0000\u00d0\u00d1"+ + "\u0003\u0010\b\u0000\u00d1\u00d2\u0003`0\u0000\u00d2\u00d3\u0003\u0010"+ + "\b\u0000\u00d3\u00d5\u0001\u0000\u0000\u0000\u00d4\u00cf\u0001\u0000\u0000"+ + "\u0000\u00d4\u00d0\u0001\u0000\u0000\u0000\u00d5\u000f\u0001\u0000\u0000"+ + "\u0000\u00d6\u00d7\u0006\b\uffff\uffff\u0000\u00d7\u00db\u0003\u0012\t"+ + "\u0000\u00d8\u00d9\u0007\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0003"+ + "\u00da\u00d6\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000"+ + "\u00db\u00e4\u0001\u0000\u0000\u0000\u00dc\u00dd\n\u0002\u0000\u0000\u00dd"+ + "\u00de\u0007\u0001\u0000\u0000\u00de\u00e3\u0003\u0010\b\u0003\u00df\u00e0"+ + "\n\u0001\u0000\u0000\u00e0\u00e1\u0007\u0000\u0000\u0000\u00e1\u00e3\u0003"+ + "\u0010\b\u0002\u00e2\u00dc\u0001\u0000\u0000\u0000\u00e2\u00df\u0001\u0000"+ + "\u0000\u0000\u00e3\u00e6\u0001\u0000\u0000\u0000\u00e4\u00e2\u0001\u0000"+ + "\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000\u00e5\u0011\u0001\u0000"+ + "\u0000\u0000\u00e6\u00e4\u0001\u0000\u0000\u0000\u00e7\u00e8\u0006\t\uffff"+ + "\uffff\u0000\u00e8\u00f0\u0003<\u001e\u0000\u00e9\u00f0\u00034\u001a\u0000"+ + "\u00ea\u00f0\u0003\u0014\n\u0000\u00eb\u00ec\u0005+\u0000\u0000\u00ec"+ + "\u00ed\u0003\n\u0005\u0000\u00ed\u00ee\u00055\u0000\u0000\u00ee\u00f0"+ + "\u0001\u0000\u0000\u0000\u00ef\u00e7\u0001\u0000\u0000\u0000\u00ef\u00e9"+ + "\u0001\u0000\u0000\u0000\u00ef\u00ea\u0001\u0000\u0000\u0000\u00ef\u00eb"+ + "\u0001\u0000\u0000\u0000\u00f0\u00f6\u0001\u0000\u0000\u0000\u00f1\u00f2"+ + "\n\u0001\u0000\u0000\u00f2\u00f3\u0005$\u0000\u0000\u00f3\u00f5\u0003"+ + "\u0016\u000b\u0000\u00f4\u00f1\u0001\u0000\u0000\u0000\u00f5\u00f8\u0001"+ + "\u0000\u0000\u0000\u00f6\u00f4\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001"+ + "\u0000\u0000\u0000\u00f7\u0013\u0001\u0000\u0000\u0000\u00f8\u00f6\u0001"+ + "\u0000\u0000\u0000\u00f9\u00fa\u00038\u001c\u0000\u00fa\u0104\u0005+\u0000"+ + "\u0000\u00fb\u0105\u0005@\u0000\u0000\u00fc\u0101\u0003\n\u0005\u0000"+ + "\u00fd\u00fe\u0005%\u0000\u0000\u00fe\u0100\u0003\n\u0005\u0000\u00ff"+ + 
"\u00fd\u0001\u0000\u0000\u0000\u0100\u0103\u0001\u0000\u0000\u0000\u0101"+ + "\u00ff\u0001\u0000\u0000\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102"+ + "\u0105\u0001\u0000\u0000\u0000\u0103\u0101\u0001\u0000\u0000\u0000\u0104"+ + "\u00fb\u0001\u0000\u0000\u0000\u0104\u00fc\u0001\u0000\u0000\u0000\u0104"+ + "\u0105\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000\u0000\u0000\u0106"+ + "\u0107\u00055\u0000\u0000\u0107\u0015\u0001\u0000\u0000\u0000\u0108\u0109"+ + "\u00038\u001c\u0000\u0109\u0017\u0001\u0000\u0000\u0000\u010a\u010b\u0005"+ + "\u000f\u0000\u0000\u010b\u010c\u0003\u001a\r\u0000\u010c\u0019\u0001\u0000"+ + "\u0000\u0000\u010d\u0112\u0003\u001c\u000e\u0000\u010e\u010f\u0005%\u0000"+ + "\u0000\u010f\u0111\u0003\u001c\u000e\u0000\u0110\u010e\u0001\u0000\u0000"+ + "\u0000\u0111\u0114\u0001\u0000\u0000\u0000\u0112\u0110\u0001\u0000\u0000"+ + "\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u001b\u0001\u0000\u0000"+ + "\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u011b\u0003\n\u0005\u0000"+ + "\u0116\u0117\u00034\u001a\u0000\u0117\u0118\u0005#\u0000\u0000\u0118\u0119"+ + "\u0003\n\u0005\u0000\u0119\u011b\u0001\u0000\u0000\u0000\u011a\u0115\u0001"+ + "\u0000\u0000\u0000\u011a\u0116\u0001\u0000\u0000\u0000\u011b\u001d\u0001"+ + "\u0000\u0000\u0000\u011c\u011d\u0005\u0006\u0000\u0000\u011d\u0122\u0003"+ + " \u0010\u0000\u011e\u011f\u0005%\u0000\u0000\u011f\u0121\u0003 \u0010"+ + "\u0000\u0120\u011e\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000"+ + "\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0122\u0123\u0001\u0000\u0000"+ + "\u0000\u0123\u0126\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000"+ + "\u0000\u0125\u0127\u0003&\u0013\u0000\u0126\u0125\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u0129\u0001\u0000\u0000\u0000"+ + "\u0128\u012a\u0003\"\u0011\u0000\u0129\u0128\u0001\u0000\u0000\u0000\u0129"+ + "\u012a\u0001\u0000\u0000\u0000\u012a\u001f\u0001\u0000\u0000\u0000\u012b"+ + "\u012c\u0005\u0018\u0000\u0000\u012c!\u0001\u0000\u0000\u0000\u012d\u012e"+ + "\u0005J\u0000\u0000\u012e\u0133\u0003$\u0012\u0000\u012f\u0130\u0005%"+ + "\u0000\u0000\u0130\u0132\u0003$\u0012\u0000\u0131\u012f\u0001\u0000\u0000"+ + "\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000"+ + "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134#\u0001\u0000\u0000\u0000"+ + "\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u0137\u0003^/\u0000\u0137\u0138"+ + "\u0005#\u0000\u0000\u0138\u0139\u0003^/\u0000\u0139%\u0001\u0000\u0000"+ + "\u0000\u013a\u013d\u0003(\u0014\u0000\u013b\u013d\u0003*\u0015\u0000\u013c"+ + "\u013a\u0001\u0000\u0000\u0000\u013c\u013b\u0001\u0000\u0000\u0000\u013d"+ + "\'\u0001\u0000\u0000\u0000\u013e\u013f\u0005K\u0000\u0000\u013f\u0144"+ + "\u0003 \u0010\u0000\u0140\u0141\u0005%\u0000\u0000\u0141\u0143\u0003 "+ + "\u0010\u0000\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000"+ + "\u0000\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000"+ + "\u0000\u0000\u0145)\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000"+ + "\u0000\u0147\u0148\u0005C\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149"+ + "\u014a\u0005D\u0000\u0000\u014a+\u0001\u0000\u0000\u0000\u014b\u014c\u0005"+ + "\f\u0000\u0000\u014c\u0151\u0003 \u0010\u0000\u014d\u014e\u0005%\u0000"+ + "\u0000\u014e\u0150\u0003 \u0010\u0000\u014f\u014d\u0001\u0000\u0000\u0000"+ + "\u0150\u0153\u0001\u0000\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ + "\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0155\u0001\u0000\u0000\u0000"+ + 
"\u0153\u0151\u0001\u0000\u0000\u0000\u0154\u0156\u0003\u001a\r\u0000\u0155"+ + "\u0154\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ + "\u0159\u0001\u0000\u0000\u0000\u0157\u0158\u0005 \u0000\u0000\u0158\u015a"+ + "\u0003\u001a\r\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a\u0001"+ + "\u0000\u0000\u0000\u015a-\u0001\u0000\u0000\u0000\u015b\u015c\u0005\u0004"+ + "\u0000\u0000\u015c\u015d\u0003\u001a\r\u0000\u015d/\u0001\u0000\u0000"+ + "\u0000\u015e\u0160\u0005\u0012\u0000\u0000\u015f\u0161\u0003\u001a\r\u0000"+ + "\u0160\u015f\u0001\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000"+ + "\u0161\u0164\u0001\u0000\u0000\u0000\u0162\u0163\u0005 \u0000\u0000\u0163"+ + "\u0165\u0003\u001a\r\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165"+ + "\u0001\u0000\u0000\u0000\u01651\u0001\u0000\u0000\u0000\u0166\u0167\u0005"+ + "\b\u0000\u0000\u0167\u016a\u0003\u001a\r\u0000\u0168\u0169\u0005 \u0000"+ + "\u0000\u0169\u016b\u0003\u001a\r\u0000\u016a\u0168\u0001\u0000\u0000\u0000"+ + "\u016a\u016b\u0001\u0000\u0000\u0000\u016b3\u0001\u0000\u0000\u0000\u016c"+ + "\u0171\u00038\u001c\u0000\u016d\u016e\u0005\'\u0000\u0000\u016e\u0170"+ + "\u00038\u001c\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001"+ + "\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001"+ + "\u0000\u0000\u0000\u01725\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000"+ + "\u0000\u0000\u0174\u0179\u0003:\u001d\u0000\u0175\u0176\u0005\'\u0000"+ + "\u0000\u0176\u0178\u0003:\u001d\u0000\u0177\u0175\u0001\u0000\u0000\u0000"+ + "\u0178\u017b\u0001\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000"+ + "\u0179\u017a\u0001\u0000\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b"+ + "\u0179\u0001\u0000\u0000\u0000\u017c\u017d\u0007\u0002\u0000\u0000\u017d"+ + "9\u0001\u0000\u0000\u0000\u017e\u017f\u0005O\u0000\u0000\u017f;\u0001"+ + "\u0000\u0000\u0000\u0180\u01ab\u00050\u0000\u0000\u0181\u0182\u0003\\"+ + ".\u0000\u0182\u0183\u0005E\u0000\u0000\u0183\u01ab\u0001\u0000\u0000\u0000"+ + "\u0184\u01ab\u0003Z-\u0000\u0185\u01ab\u0003\\.\u0000\u0186\u01ab\u0003"+ + "V+\u0000\u0187\u01ab\u00053\u0000\u0000\u0188\u01ab\u0003^/\u0000\u0189"+ + "\u018a\u0005C\u0000\u0000\u018a\u018f\u0003X,\u0000\u018b\u018c\u0005"+ + "%\u0000\u0000\u018c\u018e\u0003X,\u0000\u018d\u018b\u0001\u0000\u0000"+ + "\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u018d\u0001\u0000\u0000"+ + "\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u0190\u0192\u0001\u0000\u0000"+ + "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0193\u0005D\u0000\u0000"+ + "\u0193\u01ab\u0001\u0000\u0000\u0000\u0194\u0195\u0005C\u0000\u0000\u0195"+ + "\u019a\u0003V+\u0000\u0196\u0197\u0005%\u0000\u0000\u0197\u0199\u0003"+ + "V+\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000\u0000"+ + "\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000\u0000"+ + "\u0000\u019b\u019d\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ + "\u0000\u019d\u019e\u0005D\u0000\u0000\u019e\u01ab\u0001\u0000\u0000\u0000"+ + "\u019f\u01a0\u0005C\u0000\u0000\u01a0\u01a5\u0003^/\u0000\u01a1\u01a2"+ + "\u0005%\u0000\u0000\u01a2\u01a4\u0003^/\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000\u0000\u01a5\u01a3\u0001\u0000"+ + "\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000\u01a6\u01a8\u0001\u0000"+ + "\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005D\u0000"+ + "\u0000\u01a9\u01ab\u0001\u0000\u0000\u0000\u01aa\u0180\u0001\u0000\u0000"+ + 
"\u0000\u01aa\u0181\u0001\u0000\u0000\u0000\u01aa\u0184\u0001\u0000\u0000"+ + "\u0000\u01aa\u0185\u0001\u0000\u0000\u0000\u01aa\u0186\u0001\u0000\u0000"+ + "\u0000\u01aa\u0187\u0001\u0000\u0000\u0000\u01aa\u0188\u0001\u0000\u0000"+ + "\u0000\u01aa\u0189\u0001\u0000\u0000\u0000\u01aa\u0194\u0001\u0000\u0000"+ + "\u0000\u01aa\u019f\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000"+ + "\u01ac\u01ad\u0005\n\u0000\u0000\u01ad\u01ae\u0005\u001e\u0000\u0000\u01ae"+ + "?\u0001\u0000\u0000\u0000\u01af\u01b0\u0005\u0011\u0000\u0000\u01b0\u01b5"+ + "\u0003B!\u0000\u01b1\u01b2\u0005%\u0000\u0000\u01b2\u01b4\u0003B!\u0000"+ + "\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000\u0000\u0000"+ + "\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000"+ + "\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000\u0000\u0000\u01b8"+ + "\u01ba\u0003\n\u0005\u0000\u01b9\u01bb\u0007\u0003\u0000\u0000\u01ba\u01b9"+ + "\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb\u01be"+ + "\u0001\u0000\u0000\u0000\u01bc\u01bd\u00051\u0000\u0000\u01bd\u01bf\u0007"+ + "\u0004\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf\u0001"+ + "\u0000\u0000\u0000\u01bfC\u0001\u0000\u0000\u0000\u01c0\u01c1\u0005\t"+ + "\u0000\u0000\u01c1\u01c6\u00036\u001b\u0000\u01c2\u01c3\u0005%\u0000\u0000"+ + "\u01c3\u01c5\u00036\u001b\u0000\u01c4\u01c2\u0001\u0000\u0000\u0000\u01c5"+ + "\u01c8\u0001\u0000\u0000\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6"+ + "\u01c7\u0001\u0000\u0000\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c6"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\u0002\u0000\u0000\u01ca\u01cf"+ + "\u00036\u001b\u0000\u01cb\u01cc\u0005%\u0000\u0000\u01cc\u01ce\u00036"+ + "\u001b\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000"+ + "\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01cf\u01d0\u0001\u0000"+ + "\u0000\u0000\u01d0G\u0001\u0000\u0000\u0000\u01d1\u01cf\u0001\u0000\u0000"+ + "\u0000\u01d2\u01d3\u0005\u000e\u0000\u0000\u01d3\u01d8\u0003J%\u0000\u01d4"+ + "\u01d5\u0005%\u0000\u0000\u01d5\u01d7\u0003J%\u0000\u01d6\u01d4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001"+ + "\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9I\u0001\u0000"+ + "\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01db\u01dc\u00036\u001b"+ + "\u0000\u01dc\u01dd\u0005S\u0000\u0000\u01dd\u01de\u00036\u001b\u0000\u01de"+ + "K\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0001\u0000\u0000\u01e0\u01e1"+ + "\u0003\u0012\t\u0000\u01e1\u01e3\u0003^/\u0000\u01e2\u01e4\u0003R)\u0000"+ + "\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000"+ + "\u01e4M\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005\u0007\u0000\u0000\u01e6"+ + "\u01e7\u0003\u0012\t\u0000\u01e7\u01e8\u0003^/\u0000\u01e8O\u0001\u0000"+ + "\u0000\u0000\u01e9\u01ea\u0005\r\u0000\u0000\u01ea\u01eb\u00034\u001a"+ + "\u0000\u01ebQ\u0001\u0000\u0000\u0000\u01ec\u01f1\u0003T*\u0000\u01ed"+ + "\u01ee\u0005%\u0000\u0000\u01ee\u01f0\u0003T*\u0000\u01ef\u01ed\u0001"+ + "\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000\u01f1\u01ef\u0001"+ + "\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2S\u0001\u0000"+ + "\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5\u00038\u001c"+ + "\u0000\u01f5\u01f6\u0005#\u0000\u0000\u01f6\u01f7\u0003<\u001e\u0000\u01f7"+ + "U\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0005\u0000\u0000\u01f9W\u0001"+ + "\u0000\u0000\u0000\u01fa\u01fd\u0003Z-\u0000\u01fb\u01fd\u0003\\.\u0000"+ + 
"\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fc\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fdY\u0001\u0000\u0000\u0000\u01fe\u0200\u0007\u0000\u0000\u0000\u01ff"+ + "\u01fe\u0001\u0000\u0000\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200"+ + "\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u001f\u0000\u0000\u0202"+ + "[\u0001\u0000\u0000\u0000\u0203\u0205\u0007\u0000\u0000\u0000\u0204\u0203"+ + "\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ + "\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u001e\u0000\u0000\u0207]\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0005\u001d\u0000\u0000\u0209_\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0007\u0006\u0000\u0000\u020ba\u0001\u0000\u0000"+ + "\u0000\u020c\u020d\u0005\u0005\u0000\u0000\u020d\u020e\u0003d2\u0000\u020e"+ + "c\u0001\u0000\u0000\u0000\u020f\u0210\u0005C\u0000\u0000\u0210\u0211\u0003"+ + "\u0002\u0001\u0000\u0211\u0212\u0005D\u0000\u0000\u0212e\u0001\u0000\u0000"+ + "\u0000\u0213\u0214\u0005\u0010\u0000\u0000\u0214\u0215\u0005c\u0000\u0000"+ + "\u0215g\u0001\u0000\u0000\u0000\u0216\u0217\u0005\u000b\u0000\u0000\u0217"+ + "\u0218\u0005g\u0000\u0000\u0218i\u0001\u0000\u0000\u0000\u0219\u021a\u0005"+ + "\u0003\u0000\u0000\u021a\u021d\u0005Y\u0000\u0000\u021b\u021c\u0005W\u0000"+ + "\u0000\u021c\u021e\u00036\u001b\u0000\u021d\u021b\u0001\u0000\u0000\u0000"+ + "\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u0228\u0001\u0000\u0000\u0000"+ + "\u021f\u0220\u0005X\u0000\u0000\u0220\u0225\u0003l6\u0000\u0221\u0222"+ + "\u0005%\u0000\u0000\u0222\u0224\u0003l6\u0000\u0223\u0221\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0001\u0000\u0000\u0000\u0225\u0223\u0001\u0000"+ + "\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000\u0226\u0229\u0001\u0000"+ + "\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228\u021f\u0001\u0000"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229k\u0001\u0000\u0000"+ + "\u0000\u022a\u022b\u00036\u001b\u0000\u022b\u022c\u0005#\u0000\u0000\u022c"+ + "\u022e\u0001\u0000\u0000\u0000\u022d\u022a\u0001\u0000\u0000\u0000\u022d"+ + "\u022e\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f"+ + "\u0230\u00036\u001b\u0000\u0230m\u0001\u0000\u0000\u00007y\u0082\u0091"+ + "\u009d\u00a6\u00ae\u00b2\u00ba\u00bc\u00c1\u00c8\u00cd\u00d4\u00da\u00e2"+ + "\u00e4\u00ef\u00f6\u0101\u0104\u0112\u011a\u0122\u0126\u0129\u0133\u013c"+ + "\u0144\u0151\u0155\u0159\u0160\u0164\u016a\u0171\u0179\u018f\u019a\u01a5"+ + "\u01aa\u01b5\u01ba\u01be\u01c6\u01cf\u01d8\u01e3\u01f1\u01fc\u01ff\u0204"+ + "\u021d\u0225\u0228\u022d"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 5122eb07371b1..92c9793fd8d9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -365,13 +365,13 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *
<p>The default implementation does nothing.</p>
*/ - @Override public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + @Override public void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
*/ - @Override public void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + @Override public void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } /** * {@inheritDoc} * @@ -432,6 +432,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index a32ac9bd9100c..25eb59648fe6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -221,7 +221,7 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -257,6 +257,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 6e8000f7fcf8e..ac4047ffbd22f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -336,15 +336,15 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Enter a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree */ - void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** - * Exit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Exit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree */ - void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#fromOptions}. * @param ctx the parse tree @@ -395,6 +395,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#metricsCommand}. + * @param ctx the parse tree + */ + void enterMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. + * @param ctx the parse tree + */ + void exitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#evalCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index d6e83b37a0f39..37b94cd585c11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -204,11 +204,11 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Visit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree * @return the visitor result */ - T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#fromOptions}. * @param ctx the parse tree @@ -239,6 +239,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#evalCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 67f8eb407ee11..b5e348589fa7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -9,8 +9,8 @@ import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.common.Strings; -import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.FromIdentifierContext; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexIdentifierContext; import java.util.List; @@ -24,8 +24,8 @@ public String visitIdentifier(IdentifierContext ctx) { } @Override - public String visitFromIdentifier(FromIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); + public String visitIndexIdentifier(IndexIdentifierContext ctx) { + return ctx == null ? null : unquoteIdentifier(null, ctx.INDEX_UNQUOTED_IDENTIFIER()); } protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { @@ -42,7 +42,7 @@ protected static String unquoteIdString(String quotedString) { return quotedString.substring(1, quotedString.length() - 1).replace("``", "`"); } - public String visitFromIdentifiers(List ctx) { + public String visitIndexIdentifiers(List ctx) { return Strings.collectionToDelimitedString(visitList(this, ctx, String.class), ","); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index aea835c11ad3d..b8fc29e4ef64d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -10,6 +10,7 @@ import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.Build; import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; @@ -205,7 +206,7 @@ public LogicalPlan visitRowCommand(EsqlBaseParser.RowCommandContext ctx) { @Override public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitFromIdentifiers(ctx.fromIdentifier())); + TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { var deprecatedContext = ctx.metadata().deprecated_metadata(); @@ -222,8 +223,8 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { metadataOptionContext = ctx.metadata().metadataOption(); } - for (var c : metadataOptionContext.fromIdentifier()) { - String id = visitFromIdentifier(c); + for (var c : metadataOptionContext.indexIdentifier()) { + String id = 
visitIndexIdentifier(c); Source src = source(c); if (MetadataAttribute.isSupported(id) == false) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); @@ -253,10 +254,19 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { @Override public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { - List aggregates = new ArrayList<>(visitFields(ctx.stats)); - List groupings = visitGrouping(ctx.grouping); + final Stats stats = stats(source(ctx), ctx.grouping, ctx.stats); + return input -> new EsqlAggregate(source(ctx), input, stats.groupings, stats.aggregates); + } + + private record Stats(List groupings, List aggregates) { + + } + + private Stats stats(Source source, EsqlBaseParser.FieldsContext groupingsCtx, EsqlBaseParser.FieldsContext aggregatesCtx) { + List groupings = visitGrouping(groupingsCtx); + List aggregates = new ArrayList<>(visitFields(aggregatesCtx)); if (aggregates.isEmpty() && groupings.isEmpty()) { - throw new ParsingException(source(ctx), "At least one aggregation or grouping expression required in [{}]", ctx.getText()); + throw new ParsingException(source, "At least one aggregation or grouping expression required in [{}]", source.text()); } // grouping keys are automatically added as aggregations however the user is not allowed to specify them if (groupings.isEmpty() == false && aggregates.isEmpty() == false) { @@ -279,8 +289,7 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { for (Expression group : groupings) { aggregates.add(Expressions.attribute(group)); } - - return input -> new EsqlAggregate(source(ctx), input, new ArrayList<>(groupings), aggregates); + return new Stats(new ArrayList<>(groupings), aggregates); } private void fail(Expression exp, String message, Object... args) { @@ -427,5 +436,20 @@ private static Tuple parsePolicyName(Token policyToken) { return new Tuple<>(mode, policyName); } + @Override + public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { + if (Build.current().isSnapshot() == false) { + throw new IllegalArgumentException("METRICS command currently requires a snapshot build"); + } + Source source = source(ctx); + TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); + var unresolvedRelation = new EsqlUnresolvedRelation(source, table, List.of()); + if (ctx.aggregates == null && ctx.grouping == null) { + return unresolvedRelation; + } + final Stats stats = stats(source, ctx.grouping, ctx.aggregates); + return new EsqlAggregate(source, unresolvedRelation, stats.groupings, stats.aggregates); + } + interface PlanFactory extends Function {} } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index 9feb5e9b009d1..490ec174eea5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -68,6 +68,10 @@ public List aggregates() { return aggregates; } + public AggregateExec withMode(Mode newMode) { + return new AggregateExec(source(), child(), groupings, aggregates, newMode, estimatedRowSize); + } + /** * Estimate of the number of bytes that'll be loaded per position before * the stream of pages is consumed. 
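[Editor's illustration, not part of the diff] The new AggregateExec#withMode above is a copy-style helper: it re-plans the same aggregation — identical groupings, aggregates, and row-size estimate — under a different execution mode. A minimal sketch of the intended use, mirroring the PlannerUtils change later in this diff (the helper name and the mappedPlan parameter are hypothetical):

static AggregateExec asPartialReduction(PhysicalPlan mappedPlan) {
    // grab the first aggregate in the already-mapped physical plan, as dataNodeReductionPlan does
    AggregateExec agg = (AggregateExec) mappedPlan.collectFirstChildren(AggregateExec.class::isInstance).get(0);
    // re-issue it as the PARTIAL step of the data-node reduction plan; only the mode changes
    return agg.withMode(AggregateExec.Mode.PARTIAL);
}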
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index b79d7cc0fbdde..3d626e65f6f11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; @@ -106,12 +105,13 @@ static int estimateSize(DataType dataType) { ElementType elementType = PlannerUtils.toElementType(dataType); return switch (elementType) { case BOOLEAN -> 1; - case BYTES_REF -> { - if (dataType == DataTypes.IP) { - yield 16; - } - yield 50; // wild estimate for the size of a string. - } + case BYTES_REF -> switch (dataType.typeName()) { + case "ip" -> 16; // IP addresses, both IPv4 and IPv6, are encoded using 16 bytes. + case "version" -> 15; // 8.15.2-SNAPSHOT is 15 bytes, most are shorter, some can be longer + case "geo_point", "cartesian_point" -> 21; // WKB for points is typically 21 bytes. + case "geo_shape", "cartesian_shape" -> 200; // wild estimate, based on some test data (airport_city_boundaries) + default -> 50; // wild estimate for the size of a string. + }; case DOC -> throw new EsqlIllegalArgumentException("can't load a [doc] with field extraction"); case DOUBLE -> Double.BYTES; case INT -> Integer.BYTES; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 8c5392ccc1781..f5e4dead67347 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; import org.elasticsearch.xpack.ql.InvalidArgumentException; @@ -54,6 +55,20 @@ public final PhysicalOperation groupingPhysicalOperation( var aggregates = aggregateExec.aggregates(); var sourceLayout = source.layout; + AggregatorMode aggregatorMode; + + if (mode == AggregateExec.Mode.FINAL) { + aggregatorMode = AggregatorMode.FINAL; + } else if (mode == AggregateExec.Mode.PARTIAL) { + if (aggregateExec.child() instanceof ExchangeSourceExec) {// the reducer step at data node (local) level + aggregatorMode = AggregatorMode.INTERMEDIATE; + } else { + aggregatorMode = AggregatorMode.INITIAL; + } + } else { + assert false : "Invalid aggregator mode [" + mode + "]"; + aggregatorMode = AggregatorMode.SINGLE; + } if (aggregateExec.groupings().isEmpty()) { // not grouping @@ -65,20 +80,18 @@ public final PhysicalOperation groupingPhysicalOperation( } else { 
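// (Editor's note, not in the original diff: this is the non-FINAL path — the layout is built
// from aggregateMapper.mapNonGrouping(aggregates), i.e. the intermediate-state columns each
// aggregate emits, which is what the INITIAL and the new INTERMEDIATE aggregatorMode chosen
// above hand off between pipeline stages.)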
layout.append(aggregateMapper.mapNonGrouping(aggregates)); } + // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, false, // non-grouping s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode)) ); if (aggregatorFactories.isEmpty() == false) { - operatorFactory = new AggregationOperator.AggregationOperatorFactory( - aggregatorFactories, - mode == AggregateExec.Mode.FINAL ? AggregatorMode.FINAL : AggregatorMode.INITIAL - ); + operatorFactory = new AggregationOperator.AggregationOperatorFactory(aggregatorFactories, aggregatorMode); } } else { // grouping @@ -136,7 +149,7 @@ else if (mode == AggregateExec.Mode.PARTIAL) { // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, true, // grouping s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode)) @@ -219,7 +232,7 @@ private record AggFunctionSupplierContext(AggregatorFunctionSupplier supplier, A private void aggregatesToFactory( List aggregates, - AggregateExec.Mode mode, + AggregatorMode mode, Layout layout, boolean grouping, Consumer consumer @@ -228,11 +241,9 @@ private void aggregatesToFactory( if (ne instanceof Alias alias) { var child = alias.child(); if (child instanceof AggregateFunction aggregateFunction) { - AggregatorMode aggMode = null; List sourceAttr; - if (mode == AggregateExec.Mode.PARTIAL) { - aggMode = AggregatorMode.INITIAL; + if (mode == AggregatorMode.INITIAL) { // TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1) Expression field = aggregateFunction.field(); // Only count can now support literals - all the other aggs should be optimized away @@ -257,9 +268,7 @@ private void aggregatesToFactory( } sourceAttr = List.of(attr); } - - } else if (mode == AggregateExec.Mode.FINAL) { - aggMode = AggregatorMode.FINAL; + } else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { if (grouping) { sourceAttr = aggregateMapper.mapGrouping(aggregateFunction); } else { @@ -279,7 +288,7 @@ private void aggregatesToFactory( assert inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0); } if (aggregateFunction instanceof ToAggregator agg) { - consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), aggMode)); + consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), mode)); } else { throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 62b305a68bc28..1212e77557ca6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -54,7 +54,7 @@ public class Mapper { private final FunctionRegistry functionRegistry; - private final boolean localMode; + private final boolean localMode; // non-coordinator (data node) mode public Mapper(FunctionRegistry functionRegistry) { this.functionRegistry = functionRegistry; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 26c57f13e16c4..fbfc57261bc40 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -87,23 +88,19 @@ public static PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan if (pipelineBreakers.isEmpty() == false) { UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); - if (pipelineBreaker instanceof TopN topN) { - return new TopNExec(topN.source(), unused, topN.order(), topN.limit(), 2000); + if (pipelineBreaker instanceof TopN) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + return physicalPlan.collectFirstChildren(TopNExec.class::isInstance).get(0); } else if (pipelineBreaker instanceof Limit limit) { return new LimitExec(limit.source(), unused, limit.limit()); } else if (pipelineBreaker instanceof OrderBy order) { return new OrderExec(order.source(), unused, order.order()); - } else if (pipelineBreaker instanceof Aggregate aggregate) { - // TODO handle this as a special PARTIAL step (intermediate) - /*return new AggregateExec( - aggregate.source(), - unused, - aggregate.groupings(), - aggregate.aggregates(), - AggregateExec.Mode.PARTIAL, - 0 - );*/ - return null; + } else if (pipelineBreaker instanceof Aggregate) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + var aggregate = (AggregateExec) physicalPlan.collectFirstChildren(AggregateExec.class::isInstance).get(0); + return aggregate.withMode(AggregateExec.Mode.PARTIAL); } else { throw new EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7b38197dde95a..d9005d5997b34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -205,6 +205,7 @@ public void execute( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) ) { // run compute on the coordinator + exchangeSource.addCompletionListener(refs.acquire()); runCompute( rootTask, new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), @@ -722,6 +723,7 @@ private void runComputeOnDataNode( var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); + exchangeSource.addCompletionListener(refs.acquire()); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); runCompute( @@ -854,6 +856,7 @@ void runComputeOnRemoteCluster( RefCountingListener refs = new 
RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) ) { exchangeSink.addCompletionListener(refs.acquire()); + exchangeSource.addCompletionListener(refs.acquire()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index b72feadd20c61..ab2df4a2ba6a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -70,7 +70,7 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest { // TODO make EsqlConfiguration Releasable new BlockStreamInput(in, new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE)) ); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.clusterAlias = in.readString(); } else { this.clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; @@ -85,7 +85,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(sessionId); configuration.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeString(clusterAlias); } out.writeCollection(shardIds); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 059eec771efe8..cf311d4413671 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -10,10 +10,23 @@ import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Map; import java.util.Set; +/** + * {@link NodeFeature}s declared by ESQL. These should be used for fast checks + * on the node. Before the introduction of the {@link RestNodesCapabilitiesAction} + * this was used for controlling which features are tested so many of the + * examples below are *just* used for that. Don't make more of those - add them + * to {@link EsqlCapabilities} instead. + *
<p> + * NOTE: You can't remove a feature now and probably never will be able to. + * Only add more of these if you need a fast CPU level check. + * </p>
+ */ public class EsqlFeatures implements FeatureSpecification { /** * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. @@ -136,6 +149,12 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); + /** * Support for timespan units abbreviations */ @@ -174,7 +193,8 @@ public Map getHistoricalFeatures() { Map.entry(MV_WARN, Version.V_8_12_0), Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0) + Map.entry(POW_DOUBLE, Version.V_8_12_0), + Map.entry(ENRICH_LOAD, Version.V_8_12_0) ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index c865b21723a9e..cb8700d5d7602 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -107,9 +107,12 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.cap; import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -144,7 +147,6 @@ public class CsvTests extends ESTestCase { private static final Logger LOGGER = LogManager.getLogger(CsvTests.class); - private static final String IGNORED_CSV_FILE_NAMES_PATTERN = "-IT_tests_only"; private final String fileName; private final String groupName; @@ -164,10 +166,8 @@ public class CsvTests extends ESTestCase { @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { - List urls = classpathResources("/*.csv-spec").stream() - .filter(x -> x.toString().contains(IGNORED_CSV_FILE_NAMES_PATTERN) == false) - .toList(); - assertTrue("Not enough specs found " + urls, urls.size() > 0); + List urls = classpathResources("/*.csv-spec"); + assertThat("Not enough specs found " + urls, urls, hasSize(greaterThan(0))); return SpecReader.readScriptSpec(urls, specParser()); } @@ -223,7 +223,8 @@ public final void test() throws Throwable { * The csv tests support all but a few features. The unsupported features * are tested in integration tests. 
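* For example, enrich can't load fields in the csv tests (there is no running cluster), so specs requiring {@code ENRICH_LOAD} are skipped via the assumeFalse below and run only as integration tests.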
*/ - assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); + assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); + assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); doTest(); } catch (Throwable th) { throw reworkException(th); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1a410c518e9b1..4867b0c62a18c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -166,14 +166,17 @@ protected static Iterable parameterSuppliersFromTypedData(List */ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -315,7 +320,13 @@ public final void testEvaluateBlockWithoutNulls() { * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -356,10 +367,13 @@ protected Matcher allNullsMatcher() { } private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - assumeTrue("Must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); - boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); @@ -389,7 +403,6 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } b++; } - Expression expression = readFloating ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); try ( ExpressionEvaluator eval = evaluator(expression).get(context); Block block = eval.eval(new Page(positions, manyPositionsBlocks)) @@ -415,13 +428,15 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } } - // TODO cranky time - public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); List simpleData = testCase.getDataValues(); - try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { + try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); for (int i = 0; i < orig.length; i++) { @@ -460,12 +475,16 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo } public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); int count = 10_000; int threads = 5; - var evalSupplier = evaluator(buildFieldExpression(testCase)); + var evalSupplier = evaluator(expression); ExecutorService exec = Executors.newFixedThreadPool(threads); try { List> futures = new ArrayList<>(); @@ -492,17 +511,25 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru } public final void testEvaluatorToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); - var factory = evaluator(buildFieldExpression(testCase)); + var factory = evaluator(expression); try (ExpressionEvaluator ev = factory.get(driverContext())) { assertThat(ev.toString(), testCase.evaluatorToString()); } } public final void testFactoryToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); assertThat(factory.toString(), testCase.evaluatorToString()); } @@ -510,8 +537,7 @@ public final void testFactoryToString() { public final void testFold() { Expression expression = buildLiteralExpression(testCase); if (testCase.getExpectedTypeError() != null) { - assertTrue(expression.typeResolved().unresolved()); - 
assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + assertTypeResolutionFailure(expression); return; } assertFalse(expression.typeResolved().unresolved()); @@ -1103,6 +1129,11 @@ protected static DataType[] representableNonSpatialTypes() { return representableNonSpatial().toArray(DataType[]::new); } + protected final void assertTypeResolutionFailure(Expression expression) { + assertTrue("expected unresolved", expression.typeResolved().unresolved()); + assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + } + @AfterClass public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { @@ -1543,17 +1574,18 @@ private static void writeToTempDir(String subdir, String str, String extension) private final List breakers = Collections.synchronizedList(new ArrayList<>()); protected final DriverContext driverContext() { - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } protected final DriverContext crankyContext() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()) + .withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } @After diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 90692d5b19df1..ee23cf00a37a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.math.BigInteger; import java.util.List; import java.util.function.Function; import java.util.function.Supplier; @@ -32,6 +33,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class CaseTests extends AbstractFunctionTestCase { @@ -44,26 +46,173 @@ public CaseTests(@Name("TestCase") Supplier testCaseS */ @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("basics", () -> { - List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), 
DataTypes.KEYWORD, "b") - ); - return new TestCaseSupplier.TestCase( - typedData, - "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " - + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(new BytesRef("a")) - ); - }))); + return parameterSuppliersFromTypedData( + List.of(new TestCaseSupplier("keyword", List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef("a")) + ); + }), new TestCaseSupplier("text", List.of(DataTypes.BOOLEAN, DataTypes.TEXT), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.TEXT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.TEXT, + nullValue() + ); + }), new TestCaseSupplier("boolean", List.of(DataTypes.BOOLEAN, DataTypes.BOOLEAN), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BOOLEAN, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.BOOLEAN, + nullValue() + ); + }), new TestCaseSupplier("date", List.of(DataTypes.BOOLEAN, DataTypes.DATETIME), () -> { + long value = randomNonNegativeLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DATETIME, + equalTo(value) + ); + }), new TestCaseSupplier("double", List.of(DataTypes.BOOLEAN, DataTypes.DOUBLE), () -> { + double value = randomDouble(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DOUBLE, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=DOUBLE, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DOUBLE, + equalTo(value) + ); + }), new TestCaseSupplier("integer", List.of(DataTypes.BOOLEAN, DataTypes.INTEGER), () -> { + int value = randomInt(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.INTEGER, "trueValue") + ); + return new TestCaseSupplier.TestCase( + 
typedData, + "CaseEvaluator[resultType=INT, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.INTEGER, + nullValue() + ); + }), new TestCaseSupplier("long", List.of(DataTypes.BOOLEAN, DataTypes.LONG), () -> { + long value = randomLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.LONG, + nullValue() + ); + }), new TestCaseSupplier("unsigned_long", List.of(DataTypes.BOOLEAN, DataTypes.UNSIGNED_LONG), () -> { + BigInteger value = randomUnsignedLongBetween(BigInteger.ZERO, UNSIGNED_LONG_MAX); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.UNSIGNED_LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.UNSIGNED_LONG, + equalTo(value) + ); + }), new TestCaseSupplier("ip", List.of(DataTypes.BOOLEAN, DataTypes.IP), () -> { + BytesRef value = (BytesRef) randomLiteral(DataTypes.IP).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.IP, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.IP, + equalTo(value) + ); + }), new TestCaseSupplier("version", List.of(DataTypes.BOOLEAN, DataTypes.VERSION), () -> { + BytesRef value = (BytesRef) randomLiteral(DataTypes.VERSION).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.VERSION, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.VERSION, + nullValue() + ); + }), new TestCaseSupplier("cartesian_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.CARTESIAN_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.CARTESIAN_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.CARTESIAN_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.CARTESIAN_POINT, + nullValue() + ); + }), new TestCaseSupplier("geo_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.GEO_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.GEO_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new 
TestCaseSupplier.TypedData(value, EsqlDataTypes.GEO_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.GEO_POINT, + equalTo(value) + ); + })) + ); } @Override protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { if (nullBlock == 0) { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() == 2) { + assertThat(value.isNull(0), equalTo(true)); + } else if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } return; } if (((Boolean) data.get(0)).booleanValue()) { @@ -77,7 +226,11 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo if (nullBlock == 2) { super.assertSimpleWithNulls(data, value, nullBlock); } else { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } else { + super.assertSimpleWithNulls(data, value, nullBlock); + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java new file mode 100644 index 0000000000000..b4f195c5929e3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.matchesPattern; + +public class NowTests extends AbstractConfigurationFunctionTestCase { + public NowTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + "Now Test", + () -> new TestCaseSupplier.TestCase( + List.of(), + matchesPattern("LiteralsEvaluator\\[lit=.*\\]"), + DataTypes.DATETIME, + equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) + ) + ) + ) + ); + } + + @Override + protected Expression buildWithConfiguration(Source source, List args, EsqlConfiguration configuration) { + return new Now(Source.EMPTY, configuration); + } + + @Override + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + assertThat(((LongBlock) value).asVector().getLong(0), equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli())); + } + + @Override + protected Matcher allNullsMatcher() { + return equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index cf0dfa372ea3f..ddd53cad8ec6d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.parser; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Randomness; import org.elasticsearch.core.Tuple; @@ -44,6 +45,7 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -338,17 +340,17 @@ public void testInlineStatsWithoutGroups() { } public void testIdentifiersAsIndexPattern() { - assertIdentifierAsIndexPattern("foo", "from `foo`"); - assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); + 
// assertIdentifierAsIndexPattern("foo", "from `foo`"); + // assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); assertIdentifierAsIndexPattern("foo,test-*", "from foo,test-*"); assertIdentifierAsIndexPattern("123-test@foo_bar+baz1", "from 123-test@foo_bar+baz1"); - assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); + // assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); assertIdentifierAsIndexPattern("foo,test,xyz", "from foo, test,xyz"); assertIdentifierAsIndexPattern( - ",", - "from , ``" + "", // , + "from " // , `` ); } @@ -1049,6 +1051,147 @@ public void testInlineConvertUnsupportedType() { expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); } + public void testMetricsWithoutStats() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + + assertStatement("METRICS foo", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo"), List.of())); + assertStatement("METRICS foo,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of())); + assertStatement("METRICS foo*,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*,bar"), List.of())); + assertStatement("METRICS foo-*,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo-*,bar"), List.of())); + assertStatement( + "METRICS foo-*,bar+*", + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo-*,bar+*"), List.of()) + ); + } + + public void testMetricsIdentifiers() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + Map patterns = Map.of( + "metrics foo,test-*", + "foo,test-*", + "metrics 123-test@foo_bar+baz1", + "123-test@foo_bar+baz1", + "metrics foo, test,xyz", + "foo,test,xyz", + "metrics >", + ">" + ); + for (Map.Entry e : patterns.entrySet()) { + assertStatement(e.getKey(), new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, e.getValue()), List.of())); + } + } + + public void testSimpleMetricsWithStats() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + assertStatement( + "METRICS foo load=avg(cpu) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo"), List.of()), + List.of(attribute("ts")), + List.of(new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), attribute("ts")) + ) + ); + assertStatement( + "METRICS foo,bar load=avg(cpu) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of()), + List.of(attribute("ts")), + List.of(new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), attribute("ts")) + ) + ); + assertStatement( + "METRICS foo,bar load=avg(cpu),max(rate(requests)) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of()), + List.of(attribute("ts")), + List.of( + new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, 
List.of(attribute("cpu")))), + new Alias( + EMPTY, + "max(rate(requests))", + new UnresolvedFunction( + EMPTY, + "max", + DEFAULT, + List.of(new UnresolvedFunction(EMPTY, "rate", DEFAULT, List.of(attribute("requests")))) + ) + ), + attribute("ts") + ) + ) + ); + assertStatement( + "METRICS foo* count(errors)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "count(errors)", new UnresolvedFunction(EMPTY, "count", DEFAULT, List.of(attribute("errors"))))) + ) + ); + assertStatement( + "METRICS foo* a(b)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))) + ) + ); + assertStatement( + "METRICS foo* a(b)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))) + ) + ); + assertStatement( + "METRICS foo* a1(b2)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a1(b2)", new UnresolvedFunction(EMPTY, "a1", DEFAULT, List.of(attribute("b2"))))) + ) + ); + assertStatement( + "METRICS foo*,bar* b = min(a) by c, d.e", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*,bar*"), List.of()), + List.of(attribute("c"), attribute("d.e")), + List.of( + new Alias(EMPTY, "b", new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(attribute("a")))), + attribute("c"), + attribute("d.e") + ) + ) + ); + } + + public void testMetricWithGroupKeyAsAgg() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + var queries = List.of("METRICS foo a BY a"); + for (String query : queries) { + expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause"); + } + } + + private void assertStatement(String statement, LogicalPlan expected) { + final LogicalPlan actual; + try { + actual = statement(statement); + } catch (Exception e) { + throw new AssertionError("parsing error for [" + statement + "]", e); + } + assertThat(statement, actual, equalTo(expected)); + } + private LogicalPlan statement(String e) { return statement(e, List.of()); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java index 6061b6db89724..87c93a9198215 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java @@ -212,6 +212,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private PartialStepKey nextStepKey; public Request(String index, Step.StepKey currentStepKey, PartialStepKey nextStepKey) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.index = index; this.currentStepKey = currentStepKey; this.nextStepKey = nextStepKey; @@ -224,7 +225,9 @@ public Request(StreamInput in) throws IOException { this.nextStepKey = new PartialStepKey(in); } - public Request() {} + public Request() { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getIndex() { return index; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java index 5818ce6582bef..95358adb832c7 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java @@ -118,6 +118,7 @@ public static class Request extends AcknowledgedRequest implements Indi private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); public Request(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } @@ -127,7 +128,9 @@ public Request(StreamInput in) throws IOException { this.indicesOptions = IndicesOptions.readIndicesOptions(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public Request indices(String... indices) { diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 0aef8601ffcc6..5429e46a1d3fe 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,13 +4,15 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { - include '_common', 'indices', 'inference', 'index' + include '_common', 'bulk', 'indices', 'inference', 'index', 'get', 'update', 'reindex', 'search' } } @@ -36,6 +38,12 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("test").configure { + systemProperty 'es.semantic_text_feature_flag_enabled', 'true' + } +} + tasks.named('yamlRestTest') { usesDefaultDistribution() } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java index 41b0ed3c0314e..833b1fd3673fc 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java @@ -27,9 +27,16 @@ public void testMockService() throws IOException { assertEquals("text_embedding_test_service", modelMap.get("service")); } - // The response is randomly generated, the input can be anything - var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); + List input = List.of(randomAlphaOfLength(10)); + var inference = inferOnMockService(inferenceEntityId, input); assertNonEmptyInferenceResults(inference, 1, TaskType.TEXT_EMBEDDING); + // Same input should return the same result + assertEquals(inference, inferOnMockService(inferenceEntityId, input)); + // Different input values should not + assertNotEquals( + inference, + inferOnMockService(inferenceEntityId, 
randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))) + ); } public void testMockServiceWithMultipleInputs() throws IOException { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java index c580d72fcc405..97e0641f37c33 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java @@ -29,9 +29,16 @@ public void testMockService() throws IOException { assertEquals("test_service", modelMap.get("service")); } - // The response is randomly generated, the input can be anything - var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); + List input = List.of(randomAlphaOfLength(10)); + var inference = inferOnMockService(inferenceEntityId, input); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); + // Same input should return the same result + assertEquals(inference, inferOnMockService(inferenceEntityId, input)); + // Different input values should not + assertNotEquals( + inference, + inferOnMockService(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))) + ); } public void testMockServiceWithMultipleInputs() throws IOException { diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index fe08db9b94b89..ecfec2304c8a1 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.upgrades.ParameterizedRollingUpgradeTestCase; +import org.elasticsearch.upgrades.AbstractRollingUpgradeTestCase; import java.io.IOException; import java.util.List; @@ -21,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class InferenceUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { +public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { public InferenceUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java index 99dfc9582eb05..1bde3704864d5 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java @@ -27,6 +27,14 @@ public abstract class 
AbstractTestInferenceService implements InferenceService { + // Derives a deterministic, non-negative weight from the input string and the token/dimension position, so + // repeated inference over the same input produces identical mock results. + protected static int stringWeight(String input, int position) { + int hashCode = input.hashCode(); + if (hashCode < 0) { + hashCode = -hashCode; + } + return hashCode + position; + } + @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests @@ -101,11 +109,6 @@ public TestServiceModel( super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); } - @Override - public TestDenseInferenceServiceExtension.TestServiceSettings getServiceSettings() { - return (TestDenseInferenceServiceExtension.TestServiceSettings) super.getServiceSettings(); - } - @Override public TestTaskSettings getTaskSettings() { return (TestTaskSettings) super.getTaskSettings(); } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index c81dbdc45463c..a54b14d8fad18 100--- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -124,7 +124,7 @@ private TextEmbeddingResults makeResults(List input, int dimensions) { for (int i = 0; i < input.size(); i++) { List<Float> values = new ArrayList<>(); for (int j = 0; j < dimensions; j++) { - values.add((float) j); + values.add((float) stringWeight(input.get(i), j)); } embeddings.add(new TextEmbeddingResults.Embedding(values)); } @@ -135,8 +135,8 @@ private List makeChunkedResults(List inp var results = new ArrayList<ChunkedInferenceServiceResults>(); for (int i = 0; i < input.size(); i++) { double[] values = new double[dimensions]; - for (int j = 0; j < 5; j++) { - values[j] = j; + for (int j = 0; j < dimensions; j++) { + values[j] = stringWeight(input.get(i), j); } results.add( new org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults( @@ -172,7 +172,7 @@ public static TestServiceSettings fromMap(Map map) { SimilarityMeasure similarity = null; String similarityStr = (String) map.remove("similarity"); if (similarityStr != null) { - similarity = SimilarityMeasure.valueOf(similarityStr); + similarity = SimilarityMeasure.fromString(similarityStr); } return new TestServiceSettings(model, dimensions, similarity); } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index b13e65d1ba802..42b8ccd11a64b 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -121,7 +121,7 @@ private SparseEmbeddingResults makeResults(List input) { for (int i = 0; i < input.size(); i++) { var tokens = new ArrayList<SparseEmbeddingResults.WeightedToken>(); for (int j = 0; j < 5; j++) { - tokens.add(new SparseEmbeddingResults.WeightedToken(Integer.toString(j), (float) j)); +
tokens.add(new SparseEmbeddingResults.WeightedToken("feature_" + j, stringWeight(input.get(i), j))); } embeddings.add(new SparseEmbeddingResults.Embedding(tokens, false)); } @@ -129,21 +129,22 @@ private SparseEmbeddingResults makeResults(List input) { } private List makeChunkedResults(List input) { - var chunks = new ArrayList(); + List results = new ArrayList<>(); for (int i = 0; i < input.size(); i++) { var tokens = new ArrayList(); for (int j = 0; j < 5; j++) { - tokens.add(new TextExpansionResults.WeightedToken(Integer.toString(j), (float) j)); + tokens.add(new TextExpansionResults.WeightedToken("feature_" + j, stringWeight(input.get(i), j))); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens)); + results.add( + new ChunkedSparseEmbeddingResults(List.of(new ChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens))) + ); } - return List.of(new ChunkedSparseEmbeddingResults(chunks)); + return results; } protected ServiceSettings getServiceSettingsFromMap(Map serviceSettingsMap) { return TestServiceSettings.fromMap(serviceSettingsMap); } - } public record TestServiceSettings(String model, String hiddenField, boolean shouldReturnHiddenField) implements ServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 8d01b25aa2795..41bef3521cdf2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -26,6 +26,8 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionServiceSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; @@ -237,6 +239,21 @@ public static List getNamedWriteables() { ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + AzureOpenAiCompletionServiceSettings.NAME, + AzureOpenAiCompletionServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + TaskSettings.class, + AzureOpenAiCompletionTaskSettings.NAME, + AzureOpenAiCompletionTaskSettings::new + ) + ); + return namedWriteables; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 1afe3c891db80..34459c3beff95 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -45,6 +46,7 @@ import org.elasticsearch.xpack.inference.action.TransportInferenceAction; import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; +import org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; @@ -76,6 +78,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static java.util.Collections.singletonList; + public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin { /** @@ -101,6 +105,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final SetOnce serviceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); + private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); private List inferenceServiceExtensions; public InferencePlugin(Settings settings) { @@ -166,6 +171,9 @@ public Collection createComponents(PluginServices services) { registry.init(services.client()); inferenceServiceRegistry.set(registry); + var actionFilter = new ShardBulkInferenceActionFilter(registry, modelRegistry); + shardBulkInferenceActionFilter.set(actionFilter); + return List.of(modelRegistry, registry); } @@ -272,4 +280,12 @@ public Map getMappers() { } return Map.of(); } + + @Override + public Collection getActionFilters() { + if (SemanticTextFeature.isEnabled()) { + return singletonList(shardBulkInferenceActionFilter.get()); + } + return List.of(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java new file mode 100644 index 0000000000000..38d8b8d9b35c0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -0,0 +1,536 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action.filter; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkItemRequest; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.MappedActionFilter; +import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.inference.mapper.SemanticTextField; +import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSemanticTextFieldChunks; + +/** + * A {@link MappedActionFilter} that intercepts {@link BulkShardRequest} to apply inference on fields specified + * as {@link SemanticTextFieldMapper} in the index mapping. For each semantic text field referencing fields in + * the request source, we generate embeddings and include the results in the source under the semantic text field + * name as a {@link SemanticTextField}. + * This transformation happens on the bulk coordinator node, and the {@link SemanticTextFieldMapper} parses the + * results during indexing on the shard. 
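+ * <p> + * For illustration only (the field names here are hypothetical, and the exact JSON layout is owned by + * {@link SemanticTextField}): given a mapping where {@code inference_field} is a semantic text field whose source is + * {@code source_field}, a bulk item whose source is + * <pre>{@code + * { "source_field": "the quick brown fox" } + * }</pre> + * is rewritten on the coordinator to roughly + * <pre>{@code + * { + *   "source_field": "the quick brown fox", + *   "inference_field": { + *     "text": ["the quick brown fox"], + *     "inference": { "inference_id": "my-inference-id", "model_settings": { ... }, "chunks": [ ... ] } + *   } + * } + * }</pre> + * before the request continues down the filter chain.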
+ * + * TODO: batchSize should be configurable via a cluster setting + */ +public class ShardBulkInferenceActionFilter implements MappedActionFilter { + protected static final int DEFAULT_BATCH_SIZE = 512; + + private final InferenceServiceRegistry inferenceServiceRegistry; + private final ModelRegistry modelRegistry; + private final int batchSize; + + public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry) { + this(inferenceServiceRegistry, modelRegistry, DEFAULT_BATCH_SIZE); + } + + public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry, int batchSize) { + this.inferenceServiceRegistry = inferenceServiceRegistry; + this.modelRegistry = modelRegistry; + this.batchSize = batchSize; + } + + @Override + public int order() { + // must execute last (after the security action filter) + return Integer.MAX_VALUE; + } + + @Override + public String actionName() { + return TransportShardBulkAction.ACTION_NAME; + } + + @Override + public <Request extends ActionRequest, Response extends ActionResponse> void apply( + Task task, + String action, + Request request, + ActionListener<Response> listener, + ActionFilterChain<Request, Response> chain + ) { + if (TransportShardBulkAction.ACTION_NAME.equals(action)) { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + var fieldInferenceMetadata = bulkShardRequest.consumeInferenceFieldMap(); + if (fieldInferenceMetadata != null && fieldInferenceMetadata.isEmpty() == false) { + Runnable onInferenceCompletion = () -> chain.proceed(task, action, request, listener); + processBulkShardRequest(fieldInferenceMetadata, bulkShardRequest, onInferenceCompletion); + return; + } + } + chain.proceed(task, action, request, listener); + } + + private void processBulkShardRequest( + Map<String, InferenceFieldMetadata> fieldInferenceMap, + BulkShardRequest bulkShardRequest, + Runnable onCompletion + ) { + new AsyncBulkShardInferenceAction(fieldInferenceMap, bulkShardRequest, onCompletion).run(); + } + + private record InferenceProvider(InferenceService service, Model model) {} + + /** + * A field inference request on a single input. + * @param index The index of the request in the original bulk request. + * @param field The target field. + * @param input The input to run inference on. + * @param inputOrder The original order of the input. + * @param isOriginalFieldInput Whether the input is part of the original values of the field. + */ + private record FieldInferenceRequest(int index, String field, String input, int inputOrder, boolean isOriginalFieldInput) {} + + /** + * The field inference response. + * @param field The target field. + * @param input The input that was used to run inference. + * @param inputOrder The original order of the input. + * @param isOriginalFieldInput Whether the input is part of the original values of the field. + * @param model The model used to run inference. + * @param chunkedResults The actual results.
+ */ + private record FieldInferenceResponse( + String field, + String input, + int inputOrder, + boolean isOriginalFieldInput, + Model model, + ChunkedInferenceServiceResults chunkedResults + ) {} + + private record FieldInferenceResponseAccumulator( + int id, + Map<String, List<FieldInferenceResponse>> responses, + List<Exception> failures + ) { + void addOrUpdateResponse(FieldInferenceResponse response) { + synchronized (this) { + var list = responses.computeIfAbsent(response.field, k -> new ArrayList<>()); + list.add(response); + } + } + + void addFailure(Exception exc) { + synchronized (this) { + failures.add(exc); + } + } + } + + private class AsyncBulkShardInferenceAction implements Runnable { + private final Map<String, InferenceFieldMetadata> fieldInferenceMap; + private final BulkShardRequest bulkShardRequest; + private final Runnable onCompletion; + private final AtomicArray<FieldInferenceResponseAccumulator> inferenceResults; + + private AsyncBulkShardInferenceAction( + Map<String, InferenceFieldMetadata> fieldInferenceMap, + BulkShardRequest bulkShardRequest, + Runnable onCompletion + ) { + this.fieldInferenceMap = fieldInferenceMap; + this.bulkShardRequest = bulkShardRequest; + this.inferenceResults = new AtomicArray<>(bulkShardRequest.items().length); + this.onCompletion = onCompletion; + } + + @Override + public void run() { + Map<String, List<FieldInferenceRequest>> inferenceRequests = createFieldInferenceRequests(bulkShardRequest); + Runnable onInferenceCompletion = () -> { + try { + for (var inferenceResponse : inferenceResults.asList()) { + var request = bulkShardRequest.items()[inferenceResponse.id]; + try { + applyInferenceResponses(request, inferenceResponse); + } catch (Exception exc) { + request.abort(bulkShardRequest.index(), exc); + } + } + } finally { + onCompletion.run(); + } + }; + try (var releaseOnFinish = new RefCountingRunnable(onInferenceCompletion)) { + for (var entry : inferenceRequests.entrySet()) { + executeShardBulkInferenceAsync(entry.getKey(), null, entry.getValue(), releaseOnFinish.acquire()); + } + } + } + + private void executeShardBulkInferenceAsync( + final String inferenceId, + @Nullable InferenceProvider inferenceProvider, + final List<FieldInferenceRequest> requests, + final Releasable onFinish + ) { + if (inferenceProvider == null) { + ActionListener<ModelRegistry.UnparsedModel> modelLoadingListener = new ActionListener<>() { + @Override + public void onResponse(ModelRegistry.UnparsedModel unparsedModel) { + var service = inferenceServiceRegistry.getService(unparsedModel.service()); + if (service.isEmpty() == false) { + var provider = new InferenceProvider( + service.get(), + service.get() + .parsePersistedConfigWithSecrets( + inferenceId, + unparsedModel.taskType(), + unparsedModel.settings(), + unparsedModel.secrets() + ) + ); + executeShardBulkInferenceAsync(inferenceId, provider, requests, onFinish); + } else { + try (onFinish) { + for (FieldInferenceRequest request : requests) { + inferenceResults.get(request.index).failures.add( + new ResourceNotFoundException( + "Inference service [{}] not found for field [{}]", + unparsedModel.service(), + request.field + ) + ); + } + } + } + } + + @Override + public void onFailure(Exception exc) { + try (onFinish) { + for (FieldInferenceRequest request : requests) { + Exception failure; + if (ExceptionsHelper.unwrap(exc, ResourceNotFoundException.class) instanceof ResourceNotFoundException) { + failure = new ResourceNotFoundException( + "Inference id [{}] not found for field [{}]", + inferenceId, + request.field + ); + } else { + failure = new ElasticsearchException( + "Error loading inference for inference id [{}] on field [{}]", + exc, + inferenceId, + request.field + ); + } +
inferenceResults.get(request.index).failures.add(failure); + } + } + } + }; + modelRegistry.getModelWithSecrets(inferenceId, modelLoadingListener); + return; + } + int currentBatchSize = Math.min(requests.size(), batchSize); + final List<FieldInferenceRequest> currentBatch = requests.subList(0, currentBatchSize); + final List<FieldInferenceRequest> nextBatch = requests.subList(currentBatchSize, requests.size()); + final List<String> inputs = currentBatch.stream().map(FieldInferenceRequest::input).collect(Collectors.toList()); + ActionListener<List<ChunkedInferenceServiceResults>> completionListener = new ActionListener<>() { + @Override + public void onResponse(List<ChunkedInferenceServiceResults> results) { + try { + var requestsIterator = requests.iterator(); + for (ChunkedInferenceServiceResults result : results) { + var request = requestsIterator.next(); + var acc = inferenceResults.get(request.index); + if (result instanceof ErrorChunkedInferenceResults error) { + acc.addFailure( + new ElasticsearchException( + "Exception when running inference id [{}] on field [{}]", + error.getException(), + inferenceProvider.model.getInferenceEntityId(), + request.field + ) + ); + } else { + acc.addOrUpdateResponse( + new FieldInferenceResponse( + request.field(), + request.input(), + request.inputOrder(), + request.isOriginalFieldInput(), + inferenceProvider.model, + result + ) + ); + } + } + } finally { + onFinish(); + } + } + + @Override + public void onFailure(Exception exc) { + try { + for (FieldInferenceRequest request : requests) { + addInferenceResponseFailure( + request.index, + new ElasticsearchException( + "Exception when running inference id [{}] on field [{}]", + exc, + inferenceProvider.model.getInferenceEntityId(), + request.field + ) + ); + } + } finally { + onFinish(); + } + } + + private void onFinish() { + if (nextBatch.isEmpty()) { + onFinish.close(); + } else { + executeShardBulkInferenceAsync(inferenceId, inferenceProvider, nextBatch, onFinish); + } + } + }; + inferenceProvider.service() + .chunkedInfer( + inferenceProvider.model(), + null, + inputs, + Map.of(), + InputType.INGEST, + new ChunkingOptions(null, null), + TimeValue.MAX_VALUE, + completionListener + ); + } + + private FieldInferenceResponseAccumulator ensureResponseAccumulatorSlot(int id) { + FieldInferenceResponseAccumulator acc = inferenceResults.get(id); + if (acc == null) { + acc = new FieldInferenceResponseAccumulator(id, new HashMap<>(), new ArrayList<>()); + inferenceResults.set(id, acc); + } + return acc; + } + + private void addInferenceResponseFailure(int id, Exception failure) { + var acc = ensureResponseAccumulatorSlot(id); + acc.addFailure(failure); + } + + /** + * Applies the {@link FieldInferenceResponseAccumulator} to the provided {@link BulkItemRequest}. + * If the response contains failures, the bulk item request is marked as failed for the downstream action. + * Otherwise, the source of the request is augmented with the field inference results under the + * {@link SemanticTextField#INFERENCE_FIELD} field.
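+ * <p> + * Responses are re-sorted by {@code inputOrder} before the field is rebuilt, so a field that originally held + * multiple values keeps them in source order even when inference batches complete out of order. A sketch with + * hypothetical values: + * <pre>{@code + * original source  : { "field": ["first", "second"] } + * responses arrive : (inputOrder=1, "second"), (inputOrder=0, "first") + * rebuilt text     : ["first", "second"] + * }</pre>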
+ */ + private void applyInferenceResponses(BulkItemRequest item, FieldInferenceResponseAccumulator response) { + if (response.failures().isEmpty() == false) { + for (var failure : response.failures()) { + item.abort(item.index(), failure); + } + return; + } + + final IndexRequest indexRequest = getIndexRequestOrNull(item.request()); + var newDocMap = indexRequest.sourceAsMap(); + for (var entry : response.responses.entrySet()) { + var fieldName = entry.getKey(); + var responses = entry.getValue(); + var model = responses.get(0).model(); + // ensure that the order in the original field is consistent in case of multiple inputs + Collections.sort(responses, Comparator.comparingInt(FieldInferenceResponse::inputOrder)); + List<String> inputs = responses.stream().filter(r -> r.isOriginalFieldInput).map(r -> r.input).collect(Collectors.toList()); + List<ChunkedInferenceServiceResults> results = responses.stream().map(r -> r.chunkedResults).collect(Collectors.toList()); + var result = new SemanticTextField( + fieldName, + inputs, + new SemanticTextField.InferenceResult( + model.getInferenceEntityId(), + new SemanticTextField.ModelSettings(model), + toSemanticTextFieldChunks(fieldName, model.getInferenceEntityId(), results, indexRequest.getContentType()) + ), + indexRequest.getContentType() + ); + newDocMap.put(fieldName, result); + } + indexRequest.source(newDocMap, indexRequest.getContentType()); + } + + /** + * Register a {@link FieldInferenceRequest} for every non-empty field referencing an inference ID in the index. + * If results are already populated for fields in the original index request, the inference request for this specific + * field is skipped, and the existing results remain unchanged. + * Validation of inference ID and model settings occurs in the {@link SemanticTextFieldMapper} during field indexing, + * where an error will be thrown if they mismatch or if the content is malformed. + *

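+ * <p> + * Two update-specific consequences follow (requests shown for illustration, with hypothetical names): a scripted + * update on an index that contains semantic text fields is rejected, and a partial update must re-send every source + * field that feeds a semantic text field: + * <pre>{@code + * POST /idx/_update/1 { "script": { ... } }                     -> 400 (scripts are unsupported here) + * POST /idx/_update/1 { "doc": { "unrelated": "v" } }           -> 400 (source_field must be specified) + * POST /idx/_update/1 { "doc": { "source_field": "new text" } } -> inference is recomputed + * }</pre>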
+ * TODO: We should validate the settings for pre-existing results here and apply the inference only if they differ? + */ + private Map<String, List<FieldInferenceRequest>> createFieldInferenceRequests(BulkShardRequest bulkShardRequest) { + Map<String, List<FieldInferenceRequest>> fieldRequestsMap = new LinkedHashMap<>(); + int itemIndex = 0; + for (var item : bulkShardRequest.items()) { + if (item.getPrimaryResponse() != null) { + // item was already aborted/processed by a filter in the chain upstream (e.g. security) + continue; + } + boolean isUpdateRequest = false; + final IndexRequest indexRequest; + if (item.request() instanceof IndexRequest ir) { + indexRequest = ir; + } else if (item.request() instanceof UpdateRequest updateRequest) { + isUpdateRequest = true; + if (updateRequest.script() != null) { + addInferenceResponseFailure( + item.id(), + new ElasticsearchStatusException( + "Cannot apply update with a script on indices that contain [{}] field(s)", + RestStatus.BAD_REQUEST, + SemanticTextFieldMapper.CONTENT_TYPE + ) + ); + continue; + } + indexRequest = updateRequest.doc(); + } else { + // ignore delete request + continue; + } + final Map<String, Object> docMap = indexRequest.sourceAsMap(); + for (var entry : fieldInferenceMap.values()) { + String field = entry.getName(); + String inferenceId = entry.getInferenceId(); + var originalFieldValue = XContentMapValues.extractValue(field, docMap); + if (originalFieldValue instanceof Map) { + continue; + } + int order = 0; + for (var sourceField : entry.getSourceFields()) { + boolean isOriginalFieldInput = sourceField.equals(field); + var valueObj = XContentMapValues.extractValue(sourceField, docMap); + if (valueObj == null) { + if (isUpdateRequest) { + addInferenceResponseFailure( + item.id(), + new ElasticsearchStatusException( + "Field [{}] must be specified on an update request to calculate inference for field [{}]", + RestStatus.BAD_REQUEST, + sourceField, + field + ) + ); + break; + } + continue; + } + ensureResponseAccumulatorSlot(itemIndex); + final List<String> values; + try { + values = nodeStringValues(field, valueObj); + } catch (Exception exc) { + addInferenceResponseFailure(item.id(), exc); + break; + } + List<FieldInferenceRequest> fieldRequests = fieldRequestsMap.computeIfAbsent(inferenceId, k -> new ArrayList<>()); + for (var v : values) { + fieldRequests.add(new FieldInferenceRequest(itemIndex, field, v, order++, isOriginalFieldInput)); + } + } + } + itemIndex++; + } + return fieldRequestsMap; + } + } + + /** + * This method converts the given {@code valueObj} into a list of strings. + * If {@code valueObj} is not a string or a collection of strings, it throws an ElasticsearchStatusException.
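+ * For example (values shown for illustration): + * <pre>{@code + * nodeStringValues("f", "a")               -> ["a"] + * nodeStringValues("f", List.of("a", "b")) -> ["a", "b"] + * nodeStringValues("f", 42)                -> throws ElasticsearchStatusException (400 BAD_REQUEST) + * }</pre>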
+ */ + private static List<String> nodeStringValues(String field, Object valueObj) { + if (valueObj instanceof String value) { + return List.of(value); + } else if (valueObj instanceof Collection<?> values) { + List<String> valuesString = new ArrayList<>(); + for (var v : values) { + if (v instanceof String value) { + valuesString.add(value); + } else { + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + } + return valuesString; + } + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + + static IndexRequest getIndexRequestOrNull(DocWriteRequest<?> docWriteRequest) { + if (docWriteRequest instanceof IndexRequest indexRequest) { + return indexRequest; + } else if (docWriteRequest instanceof UpdateRequest updateRequest) { + return updateRequest.doc(); + } else { + return null; + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java index 39eaaceae08bc..73ba286c9031a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; @@ -32,4 +33,10 @@ public ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings) { + + @Override + public ExecutableAction create(AzureOpenAiCompletionModel model, Map<String, Object> taskSettings) { + var overriddenModel = AzureOpenAiCompletionModel.of(model, taskSettings); + return new AzureOpenAiCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java index 49d1ce61b12dd..f45c1d797085e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.inference.external.action.azureopenai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; public interface AzureOpenAiActionVisitor { ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map<String, Object> taskSettings); + + ExecutableAction create(AzureOpenAiCompletionModel model, Map<String, Object> taskSettings); } diff
--git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java new file mode 100644 index 0000000000000..d38d02ef9620f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AzureOpenAiCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final AzureOpenAiCompletionRequestManager requestCreator; + private final Sender sender; + + public AzureOpenAiCompletionAction(Sender sender, AzureOpenAiCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new AzureOpenAiCompletionRequestManager(model, serviceComponents.threadPool()); + this.errorMessage = constructFailedToSendRequestMessage(model.getUri(), "Azure OpenAI completion"); + } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); + return; + } + + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("Azure OpenAI completion only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + 
listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java index 5d75adedddde0..e11e9d5ad8cc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -44,16 +44,17 @@ public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model @Override public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { - if (inferenceInputs instanceof DocumentsOnlyInput docsOnlyInput) { - if (docsOnlyInput.getInputs().size() > 1) { - listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); - return; - } - } else { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); return; } + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java new file mode 100644 index 0000000000000..2811155f6f357 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.azureopenai.AzureOpenAiCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class AzureOpenAiCompletionRequestManager extends AzureOpenAiRequestManager { + + private static final Logger logger = LogManager.getLogger(AzureOpenAiCompletionRequestManager.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final AzureOpenAiCompletionModel model; + + private static ResponseHandler createCompletionHandler() { + return new AzureOpenAiResponseHandler("azure openai completion", AzureOpenAiCompletionResponseEntity::fromResponse); + } + + public AzureOpenAiCompletionRequestManager(AzureOpenAiCompletionModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = Objects.requireNonNull(model); + } + + @Override + public Runnable create( + @Nullable String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + AzureOpenAiCompletionRequest request = new AzureOpenAiCompletionRequest(input, model); + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java new file mode 100644 index 0000000000000..8854dc7950365 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class AzureOpenAiCompletionRequest implements AzureOpenAiRequest { + + private final List input; + + private final URI uri; + + private final AzureOpenAiCompletionModel model; + + public AzureOpenAiCompletionRequest(List input, AzureOpenAiCompletionModel model) { + this.input = input; + this.model = Objects.requireNonNull(model); + this.uri = model.getUri(); + } + + @Override + public HttpRequest createHttpRequest() { + var httpPost = new HttpPost(uri); + var requestEntity = Strings.toString(new AzureOpenAiCompletionRequestEntity(input, model.getTaskSettings().user())); + + ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return this.uri; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public Request truncate() { + // No truncation for Azure OpenAI completion + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for Azure OpenAI completion + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java new file mode 100644 index 0000000000000..86614ef32855f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record AzureOpenAiCompletionRequestEntity(List messages, @Nullable String user) implements ToXContentObject { + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String MESSAGES_FIELD = "messages"; + + private static final String ROLE_FIELD = "role"; + + private static final String CONTENT_FIELD = "content"; + + private static final String USER_FIELD = "user"; + + public AzureOpenAiCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + + builder.endArray(); + + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index f20398fec0e57..00af244fca913 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -7,13 +7,9 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.message.BasicHeader; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; @@ -23,14 +19,7 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; - public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { - private static final String MISSING_AUTHENTICATION_ERROR_MESSAGE = - "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; private final Truncator truncator; private final Truncator.TruncationResult truncationResult; @@ -59,21 +48,7 @@ public HttpRequest createHttpRequest() { ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); - httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); - - var entraId = model.getSecretSettings().entraId(); - var apiKey = model.getSecretSettings().apiKey(); - - if (entraId != null && entraId.isEmpty() == false) { - httpPost.setHeader(createAuthBearerHeader(entraId)); - } else if (apiKey != null && apiKey.isEmpty() == false) { - httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); - } else { - // should never happen due to the checks on the secret settings, but just in case - ValidationException validationException = new ValidationException(); - validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); - throw validationException; - } + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); return new HttpRequest(httpPost, getInferenceEntityId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java index edb7c70b3903e..79a0e4a4eba33 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java @@ -7,6 +7,40 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -public interface AzureOpenAiRequest extends Request {} +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; + +public interface AzureOpenAiRequest extends Request { + + String MISSING_AUTHENTICATION_ERROR_MESSAGE = + "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; + + static void decorateWithAuthHeader(HttpPost httpPost, AzureOpenAiSecretSettings secretSettings) { + httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); + + var entraId = secretSettings.entraId(); + var apiKey = secretSettings.apiKey(); + + if (entraId != null && entraId.isEmpty() == false) { + httpPost.setHeader(createAuthBearerHeader(entraId)); + } else if (apiKey != null && apiKey.isEmpty() == false) { + httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); + } else { + // should never happen due to the checks on the secret settings, but just in case + ValidationException validationException = new ValidationException(); + validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); + throw validationException; + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java index 16a02a4c06c1c..6e657640e27ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java @@ -13,6 +13,8 @@ public class AzureOpenAiUtils { public static final String OPENAI_PATH = "openai"; public static final String DEPLOYMENTS_PATH = "deployments"; public static final String EMBEDDINGS_PATH = "embeddings"; + public static final String CHAT_PATH = "chat"; + public static final String COMPLETIONS_PATH = "completions"; public static final String API_VERSION_PARAMETER = "api-version"; public static final String API_KEY_HEADER = "api-key"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 42fd0ddc812ec..55a7f35710cf6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -39,7 +39,7 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { XContentParser.Token token = parser.nextToken(); - while (token != null && token != XContentParser.Token.END_OBJECT) { + while (token != null) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java new file mode 100644 index 0000000000000..ca1df7027cb40 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class AzureOpenAiCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Azure OpenAI completions response"; + + /** + * Parses the Azure OpenAI completion response. + * For a request like: + * + *

+     * <pre>
+     *     <code>
+     *         {
+     *             "inputs": "Please summarize this text: some text"
+     *         }
+     *     </code>
+     * </pre>
+ * + * The response would look like: + * + *
+     * <pre>
+     *     <code>
+     *         {
+     *             "choices": [
+     *                 {
+     *                     "content_filter_results": {
+     *                         "hate": { ... },
+     *                         "self_harm": { ... },
+     *                         "sexual": { ... },
+     *                         "violence": { ... }
+     *                     },
+     *                     "finish_reason": "stop",
+     *                     "index": 0,
+     *                     "logprobs": null,
+     *                     "message": {
+     *                         "content": "response",
+     *                         "role": "assistant"
+     *                     }
+     *                 }
+     *             ],
+     *             "created": 1714982782,
+     *             "id": "...",
+     *             "model": "gpt-4",
+     *             "object": "chat.completion",
+     *             "prompt_filter_results": [
+     *                 {
+     *                     "prompt_index": 0,
+     *                     "content_filter_results": {
+     *                         "hate": { ... },
+     *                         "self_harm": { ... },
+     *                         "sexual": { ... },
+     *                         "violence": { ... }
+     *                     }
+     *                 }
+     *             ],
+     *             "system_fingerprint": null,
+     *             "usage": { ... }
+     *         }
+     *     </code>
+     * </pre>
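+     *
+     * Only the {@code content} of the first entry in {@code choices} is extracted; the other response fields are ignored.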
+ */ + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java index 93141727f705c..c9cc71b7fdcda 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java @@ -148,14 +148,12 @@ private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser p } if (index == -1) { - logger.error("Failed to find required field [index] in Cohere embeddings response"); + logger.warn("Failed to find required field [index] in Cohere rerank response"); } if (relevanceScore == -1) { - logger.error("Failed to find required field [relevance_score] in Cohere embeddings response"); - } - if (documentText == null) { - logger.error("Failed to find required field [document] in Cohere embeddings response"); + logger.warn("Failed to find required field [relevance_score] in Cohere rerank response"); } + // documentText may or may not be present depending on the request parameter return new RankedDocsResults.RankedDoc(index, relevanceScore, documentText); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 6f9e32e32f667..47c7cc0fce015 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -60,16 +60,42 @@ public static T removeAsType(Map sourceMap, String key, Clas if (type.isAssignableFrom(o.getClass())) { return (T) o; } else { - throw new ElasticsearchStatusException( - "field [{}] is not of the expected type." 
+ " The value [{}] cannot be converted to a [{}]", - RestStatus.BAD_REQUEST, - key, - o, - type.getSimpleName() - ); + throw new ElasticsearchStatusException(invalidTypeErrorMsg(key, o, type.getSimpleName()), RestStatus.BAD_REQUEST); } } + /** + * Remove the object from the map and cast to the expected type. + * If the object cannot be cast to type and error is added to the + * {@code validationException} parameter + * + * @param sourceMap Map containing fields + * @param key The key of the object to remove + * @param type The expected type of the removed object + * @param validationException If the value is not of type {@code type} + * @return {@code null} if not present else the object cast to type T + * @param The expected type + */ + @SuppressWarnings("unchecked") + public static T removeAsType(Map sourceMap, String key, Class type, ValidationException validationException) { + Object o = sourceMap.remove(key); + if (o == null) { + return null; + } + + if (type.isAssignableFrom(o.getClass())) { + return (T) o; + } else { + validationException.addValidationError(invalidTypeErrorMsg(key, o, type.getSimpleName())); + return null; + } + } + + @SuppressWarnings("unchecked") + public static Map removeFromMap(Map sourceMap, String fieldName) { + return (Map) sourceMap.remove(fieldName); + } + @SuppressWarnings("unchecked") public static Map removeFromMapOrThrowIfNull(Map sourceMap, String fieldName) { Map value = (Map) sourceMap.remove(fieldName); @@ -116,6 +142,15 @@ public static String missingSettingErrorMsg(String settingName, String scope) { return Strings.format("[%s] does not contain the required setting [%s]", scope, settingName); } + public static String invalidTypeErrorMsg(String settingName, Object foundObject, String expectedType) { + return Strings.format( + "field [%s] is not of the expected type. 
The value [%s] cannot be converted to a [%s]", + settingName, + foundObject, + expectedType + ); + } + public static String invalidUrlErrorMsg(String url, String settingName, String settingScope) { return Strings.format("[%s] Invalid url [%s] received for field [%s]", settingScope, url, settingName); } @@ -230,7 +265,13 @@ public static String extractRequiredString( String scope, ValidationException validationException ) { - String requiredField = ServiceUtils.removeAsType(map, settingName, String.class); + int initialValidationErrorCount = validationException.validationErrors().size(); + String requiredField = ServiceUtils.removeAsType(map, settingName, String.class, validationException); + + if (validationException.validationErrors().size() > initialValidationErrorCount) { + // new validation error occurred + return null; + } if (requiredField == null) { validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(settingName, scope)); @@ -238,7 +279,7 @@ public static String extractRequiredString( validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } - if (validationException.validationErrors().isEmpty() == false) { + if (validationException.validationErrors().size() > initialValidationErrorCount) { return null; } @@ -251,13 +292,19 @@ public static String extractOptionalString( String scope, ValidationException validationException ) { - String optionalField = ServiceUtils.removeAsType(map, settingName, String.class); + int initialValidationErrorCount = validationException.validationErrors().size(); + String optionalField = ServiceUtils.removeAsType(map, settingName, String.class, validationException); + + if (validationException.validationErrors().size() > initialValidationErrorCount) { + // new validation error occurred + return null; + } if (optionalField != null && optionalField.isEmpty()) { validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } - if (validationException.validationErrors().isEmpty() == false) { + if (validationException.validationErrors().size() > initialValidationErrorCount) { return null; } @@ -270,13 +317,18 @@ public static Integer extractOptionalPositiveInteger( String scope, ValidationException validationException ) { - Integer optionalField = ServiceUtils.removeAsType(map, settingName, Integer.class); + int initialValidationErrorCount = validationException.validationErrors().size(); + Integer optionalField = ServiceUtils.removeAsType(map, settingName, Integer.class, validationException); + + if (validationException.validationErrors().size() > initialValidationErrorCount) { + return null; + } if (optionalField != null && optionalField <= 0) { validationException.addValidationError(ServiceUtils.mustBeAPositiveNumberErrorMessage(settingName, scope, optionalField)); } - if (validationException.validationErrors().isEmpty() == false) { + if (validationException.validationErrors().size() > initialValidationErrorCount) { return null; } @@ -309,19 +361,8 @@ public static > E extractOptionalEnum( return null; } - public static Boolean extractOptionalBoolean( - Map map, - String settingName, - String scope, - ValidationException validationException - ) { - Boolean optionalField = ServiceUtils.removeAsType(map, settingName, Boolean.class); - - if (validationException.validationErrors().isEmpty() == false) { - return null; - } - - return optionalField; + public static Boolean extractOptionalBoolean(Map map, String settingName, ValidationException validationException) { + 
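// The validating removeAsType overload records a type mismatch on the ValidationException instead of throwing.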
return ServiceUtils.removeAsType(map, settingName, Boolean.class, validationException); } public static TimeValue extractOptionalTimeValue( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java index 5e50229e25643..708088af54cc2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai; +import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -14,11 +15,18 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + public abstract class AzureOpenAiModel extends Model { protected URI uri; @@ -50,6 +58,30 @@ protected AzureOpenAiModel(AzureOpenAiModel model, ServiceSettings serviceSettin public abstract ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings); + public final URI buildUriString() throws URISyntaxException { + return AzureOpenAiModel.buildUri(resourceName(), deploymentId(), apiVersion(), operationPathSegments()); + } + + // use only for testing directly + public static URI buildUri(String resourceName, String deploymentId, String apiVersion, String... 
pathSegments) + throws URISyntaxException { + String hostname = format("%s.%s", resourceName, AzureOpenAiUtils.HOST_SUFFIX); + + return new URIBuilder().setScheme("https") + .setHost(hostname) + .setPathSegments(createPathSegmentsList(deploymentId, pathSegments)) + .addParameter(AzureOpenAiUtils.API_VERSION_PARAMETER, apiVersion) + .build(); + } + + private static List createPathSegmentsList(String deploymentId, String[] pathSegments) { + List pathSegmentsList = new ArrayList<>( + List.of(AzureOpenAiUtils.OPENAI_PATH, AzureOpenAiUtils.DEPLOYMENTS_PATH, deploymentId) + ); + pathSegmentsList.addAll(Arrays.asList(pathSegments)); + return pathSegmentsList; + } + public URI getUri() { return uri; } @@ -62,4 +94,13 @@ public void setUri(URI newUri) { public AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings() { return rateLimitServiceSettings; } + + // TODO: can be inferred directly from modelConfigurations.getServiceSettings(); will be addressed with separate refactoring + public abstract String resourceName(); + + public abstract String deploymentId(); + + public abstract String apiVersion(); + + public abstract String[] operationPathSegments(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index f871fe6c080a1..48e45f368bfe2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -25,12 +25,16 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalSecureString; -public record AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable SecureString entraId) implements SecretSettings { +public class AzureOpenAiSecretSettings implements SecretSettings { public static final String NAME = "azure_openai_secret_settings"; public static final String API_KEY = "api_key"; public static final String ENTRA_ID = "entra_id"; + private final SecureString entraId; + + private final SecureString apiKey; + public static AzureOpenAiSecretSettings fromMap(@Nullable Map map) { if (map == null) { return null; @@ -59,14 +63,24 @@ public static AzureOpenAiSecretSettings fromMap(@Nullable Map ma return new AzureOpenAiSecretSettings(secureApiToken, secureEntraId); } - public AzureOpenAiSecretSettings { + public AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable SecureString entraId) { Objects.requireNonNullElse(apiKey, entraId); + this.apiKey = apiKey; + this.entraId = entraId; } public AzureOpenAiSecretSettings(StreamInput in) throws IOException { this(in.readOptionalSecureString(), in.readOptionalSecureString()); } + public SecureString apiKey() { + return apiKey; + } + + public SecureString entraId() { + return entraId; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -98,4 +112,17 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalSecureString(apiKey); out.writeOptionalSecureString(entraId); } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + 
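// SecureString implements value-based equality, so both secrets are compared by their contents.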
AzureOpenAiSecretSettings that = (AzureOpenAiSecretSettings) object; + return Objects.equals(entraId, that.entraId) && Objects.equals(apiKey, that.apiKey); + } + + @Override + public int hashCode() { + return Objects.hash(entraId, apiKey); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index c6b97e22b099d..e0e48ab20a86b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; @@ -121,19 +122,23 @@ private static AzureOpenAiModel createModel( String failureMessage, ConfigurationParseContext context ) { - if (taskType == TaskType.TEXT_EMBEDDING) { - return new AzureOpenAiEmbeddingsModel( - inferenceEntityId, - taskType, - NAME, - serviceSettings, - taskSettings, - secretSettings, - context - ); + switch (taskType) { + case TEXT_EMBEDDING -> { + return new AzureOpenAiEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + } + case COMPLETION -> { + return new AzureOpenAiCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + } + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } - - throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java new file mode 100644 index 0000000000000..05cb663453542 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import java.net.URISyntaxException; +import java.util.Map; + +public class AzureOpenAiCompletionModel extends AzureOpenAiModel { + + public static AzureOpenAiCompletionModel of(AzureOpenAiCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap(taskSettings); + return new AzureOpenAiCompletionModel(model, AzureOpenAiCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings), + AzureOpenAiCompletionTaskSettings.fromMap(taskSettings), + AzureOpenAiSecretSettings.fromMap(secrets) + ); + } + + // Should only be used directly for testing + AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + AzureOpenAiCompletionServiceSettings serviceSettings, + AzureOpenAiCompletionTaskSettings taskSettings, + @Nullable AzureOpenAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = buildUriString(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionServiceSettings serviceSettings) { + super(originalModel, serviceSettings); + } + + private AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public AzureOpenAiCompletionServiceSettings getServiceSettings() { + return (AzureOpenAiCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public AzureOpenAiCompletionTaskSettings getTaskSettings() { + return (AzureOpenAiCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public AzureOpenAiSecretSettings getSecretSettings() { + return (AzureOpenAiSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } + + @Override + public String resourceName() { + return getServiceSettings().resourceName(); + } + + @Override + public String deploymentId() { + return getServiceSettings().deploymentId(); + } + + @Override + public String apiVersion() { + return getServiceSettings().apiVersion(); + } + + @Override + public String[] operationPathSegments() { + return new 
String[] { AzureOpenAiUtils.CHAT_PATH, AzureOpenAiUtils.COMPLETIONS_PATH }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..5dd42bb1b911f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.USER; + +public record AzureOpenAiCompletionRequestTaskSettings(@Nullable String user) { + + public static final AzureOpenAiCompletionRequestTaskSettings EMPTY_SETTINGS = new AzureOpenAiCompletionRequestTaskSettings(null); + + public static AzureOpenAiCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionRequestTaskSettings(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java new file mode 100644 index 0000000000000..4100ce7358a3f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.API_VERSION; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.DEPLOYMENT_ID; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.RESOURCE_NAME; + +public class AzureOpenAiCompletionServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + AzureOpenAiRateLimitServiceSettings { + + public static final String NAME = "azure_openai_completions_service_settings"; + + /** + * Rate limit documentation can be found here: + * + * Limits per region per model id + * https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits + * + * How to change the limits + * https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest + * + * Blog giving some examples + * https://techcommunity.microsoft.com/t5/fasttrack-for-azure/optimizing-azure-openai-a-guide-to-limits-quotas-and-best/ba-p/4076268 + * + * According to the docs 1000 tokens per minute (TPM) = 6 requests per minute (RPM). The limits change depending on the region + * and model. The lowest chat completions limit is 20k TPM, so we'll default to that. + * Calculation: 20K TPM = 20 * 6 = 120 requests per minute (used `francecentral` and `gpt-4` as basis for the calculation). 
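+     * In general: requests per minute = (tokens per minute / 1000) * 6.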
+ */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static AzureOpenAiCompletionServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + var settings = fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionServiceSettings(settings); + } + + private static AzureOpenAiCompletionServiceSettings.CommonFields fromMap( + Map map, + ValidationException validationException + ) { + String resourceName = extractRequiredString(map, RESOURCE_NAME, ModelConfigurations.SERVICE_SETTINGS, validationException); + String deploymentId = extractRequiredString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + + return new AzureOpenAiCompletionServiceSettings.CommonFields(resourceName, deploymentId, apiVersion, rateLimitSettings); + } + + private record CommonFields(String resourceName, String deploymentId, String apiVersion, RateLimitSettings rateLimitSettings) {} + + private final String resourceName; + private final String deploymentId; + private final String apiVersion; + + private final RateLimitSettings rateLimitSettings; + + public AzureOpenAiCompletionServiceSettings( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.resourceName = resourceName; + this.deploymentId = deploymentId; + this.apiVersion = apiVersion; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public AzureOpenAiCompletionServiceSettings(StreamInput in) throws IOException { + resourceName = in.readString(); + deploymentId = in.readString(); + apiVersion = in.readString(); + rateLimitSettings = new RateLimitSettings(in); + } + + private AzureOpenAiCompletionServiceSettings(AzureOpenAiCompletionServiceSettings.CommonFields fields) { + this(fields.resourceName, fields.deploymentId, fields.apiVersion, fields.rateLimitSettings); + } + + public String resourceName() { + return resourceName; + } + + public String deploymentId() { + return deploymentId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return DEFAULT_RATE_LIMIT_SETTINGS; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field(RESOURCE_NAME, resourceName); + builder.field(DEPLOYMENT_ID, deploymentId); + builder.field(API_VERSION, apiVersion); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
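// The write order must match the StreamInput constructor: resource name, deployment id, API version, then rate limit settings.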
out.writeString(resourceName); + out.writeString(deploymentId); + out.writeString(apiVersion); + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionServiceSettings that = (AzureOpenAiCompletionServiceSettings) object; + return Objects.equals(resourceName, that.resourceName) + && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(resourceName, deploymentId, apiVersion, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java new file mode 100644 index 0000000000000..6e9f77e1ade21 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; + +public class AzureOpenAiCompletionTaskSettings implements TaskSettings { + + public static final String NAME = "azure_openai_completion_task_settings"; + + public static final String USER = "user"; + + public static AzureOpenAiCompletionTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionTaskSettings(user); + } + + private final String user; + + public static AzureOpenAiCompletionTaskSettings of( + AzureOpenAiCompletionTaskSettings originalSettings, + AzureOpenAiCompletionRequestTaskSettings requestSettings + ) { + var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); + return new AzureOpenAiCompletionTaskSettings(userToUse); + } + + public AzureOpenAiCompletionTaskSettings(@Nullable String user) { + this.user = user; + } + + public AzureOpenAiCompletionTaskSettings(StreamInput in) throws IOException { + this.user = in.readOptionalString(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (user != null) { + builder.field(USER, user); + } + } + builder.endObject(); + return builder; + } + + public String user() { + return user; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(user); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionTaskSettings that = (AzureOpenAiCompletionTaskSettings) object; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java index 93d1e31a3bed1..377bb33f58619 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.azureopenai.embeddings; -import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -19,12 +18,9 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -import java.net.URI; import java.net.URISyntaxException; import java.util.Map; -import static org.elasticsearch.core.Strings.format; - public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { public static AzureOpenAiEmbeddingsModel of(AzureOpenAiEmbeddingsModel model, Map taskSettings) { @@ -70,7 +66,7 @@ public AzureOpenAiEmbeddingsModel( serviceSettings ); try { - this.uri = getEmbeddingsUri(serviceSettings.resourceName(), serviceSettings.deploymentId(), serviceSettings.apiVersion()); + this.uri = buildUriString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -104,17 +100,24 @@ public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 4c39d35e2ff03..d55615e9df48a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -202,7 +202,7 @@ public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 00a406a7a3efa..685dac0f3877c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -169,7 +169,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java index 134cb29862e64..0a42df8c0bb41 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java @@ -174,7 +174,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java index 75588aa2b5036..82f2d0e6f7ada 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java @@ -49,7 +49,7 @@ public static CohereRerankTaskSettings fromMap(Map map) { return EMPTY_SETTINGS; } - Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, ModelConfigurations.TASK_SETTINGS, validationException); + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException); Integer topNDocumentsOnly = extractOptionalPositiveInteger( map, TOP_N_DOCS_ONLY, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index 86ac5bbaaa272..ba98090c92522 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,8 +18,6 @@ import java.io.IOException; import java.util.Map; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class CustomElandInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "custom_eland_model_internal_service_settings"; @@ -86,7 +85,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java index aa05af9461565..1f9ec163aa546 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java @@ -9,16 +9,32 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import java.util.Map; + import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; public class CustomElandModel extends ElasticsearchModel { + public static CustomElandModel build( + String inferenceEntityId, + TaskType taskType, + String service, + CustomElandInternalServiceSettings serviceSettings, + @Nullable TaskSettings taskSettings + ) { + return taskSettings == null + ? 
new CustomElandModel(inferenceEntityId, taskType, service, serviceSettings) + : new CustomElandModel(inferenceEntityId, taskType, service, serviceSettings, taskSettings); + } + public CustomElandModel( String inferenceEntityId, TaskType taskType, @@ -28,6 +44,16 @@ public CustomElandModel( super(inferenceEntityId, taskType, service, serviceSettings); } + private CustomElandModel( + String inferenceEntityId, + TaskType taskType, + String service, + CustomElandInternalServiceSettings serviceSettings, + TaskSettings taskSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, taskSettings); + } + @Override public CustomElandInternalServiceSettings getServiceSettings() { return (CustomElandInternalServiceSettings) super.getServiceSettings(); @@ -76,4 +102,11 @@ public void onFailure(Exception e) { }; } + public static TaskSettings taskSettingsFromMap(TaskType taskType, Map taskSettingsMap) { + if (TaskType.RERANK.equals(taskType)) { + return CustomElandRerankTaskSettings.defaultsFromMap(taskSettingsMap); + } + + return null; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java new file mode 100644 index 0000000000000..a82ffbba3d688 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; + +/** + * Defines the task settings for internal rerank service. 
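+ * The only setting is {@code return_documents}; {@link #defaultsFromMap} treats a missing value as {@code true}.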
+ */
+public class CustomElandRerankTaskSettings implements TaskSettings {
+
+    public static final String NAME = "custom_eland_rerank_task_settings";
+    public static final String RETURN_DOCUMENTS = "return_documents";
+
+    static final CustomElandRerankTaskSettings DEFAULT_SETTINGS = new CustomElandRerankTaskSettings(Boolean.TRUE);
+
+    public static CustomElandRerankTaskSettings defaultsFromMap(Map<String, Object> map) {
+        ValidationException validationException = new ValidationException();
+
+        if (map == null || map.isEmpty()) {
+            return DEFAULT_SETTINGS;
+        }
+
+        Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException);
+        if (validationException.validationErrors().isEmpty() == false) {
+            throw validationException;
+        }
+
+        if (returnDocuments == null) {
+            returnDocuments = true;
+        }
+
+        return new CustomElandRerankTaskSettings(returnDocuments);
+    }
+
+    /**
+     * Builds task settings from the map without any validation.
+     * @param map source map
+     * @return Task settings
+     */
+    public static CustomElandRerankTaskSettings fromMap(Map<String, Object> map) {
+        if (map == null || map.isEmpty()) {
+            return DEFAULT_SETTINGS;
+        }
+
+        Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, new ValidationException());
+        return new CustomElandRerankTaskSettings(returnDocuments);
+    }
+
+    /**
+     * Return either the request or the original settings, preferring non-null fields
+     * from the request settings over the original settings.
+     *
+     * @param originalSettings the settings stored as part of the inference entity configuration
+     * @param requestTaskSettings the settings passed in within the task_settings field of the request
+     * @return Either {@code originalSettings} or {@code requestTaskSettings}
+     */
+    public static CustomElandRerankTaskSettings of(
+        CustomElandRerankTaskSettings originalSettings,
+        CustomElandRerankTaskSettings requestTaskSettings
+    ) {
+        return requestTaskSettings.returnDocuments() != null ? 
requestTaskSettings : originalSettings; + } + + private final Boolean returnDocuments; + + public CustomElandRerankTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalBoolean()); + } + + public CustomElandRerankTaskSettings(@Nullable Boolean doReturnDocuments) { + this.returnDocuments = doReturnDocuments; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (returnDocuments != null) { + builder.field(RETURN_DOCUMENTS, returnDocuments); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_RERANK; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalBoolean(returnDocuments); + } + + public Boolean returnDocuments() { + return returnDocuments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomElandRerankTaskSettings that = (CustomElandRerankTaskSettings) o; + return Objects.equals(returnDocuments, that.returnDocuments); + } + + @Override + public int hashCode() { + return Objects.hash(returnDocuments); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index cceeb59284c1b..408e3ec1ccbca 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; @@ -40,18 +41,22 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import 
static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; import static org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings.MODEL_ID; @@ -85,6 +90,7 @@ public void parseRequestConfig( ) { try { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMap(config, ModelConfigurations.TASK_SETTINGS); String modelId = (String) serviceSettingsMap.get(MODEL_ID); if (modelId == null) { throw new IllegalArgumentException("Error parsing request config, model id is missing"); @@ -93,7 +99,7 @@ public void parseRequestConfig( e5Case(inferenceEntityId, taskType, config, platformArchitectures, serviceSettingsMap, modelListener); } else { throwIfNotEmptyMap(config, name()); - customElandCase(inferenceEntityId, taskType, serviceSettingsMap, modelListener); + customElandCase(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, modelListener); } } catch (Exception e) { modelListener.onFailure(e); @@ -104,6 +110,7 @@ private void customElandCase( String inferenceEntityId, TaskType taskType, Map serviceSettingsMap, + Map taskSettingsMap, ActionListener modelListener ) { String modelId = (String) serviceSettingsMap.get(MODEL_ID); @@ -121,7 +128,18 @@ private void customElandCase( serviceSettingsMap ).build(); throwIfNotEmptyMap(serviceSettingsMap, name()); - delegate.onResponse(new CustomElandModel(inferenceEntityId, taskType, name(), customElandInternalServiceSettings)); + + var taskSettings = CustomElandModel.taskSettingsFromMap(TaskType.RERANK, taskSettingsMap); + throwIfNotEmptyMap(taskSettingsMap, name()); + + var model = CustomElandModel.build( + inferenceEntityId, + TaskType.RERANK, + name(), + customElandInternalServiceSettings, + taskSettings + ); + delegate.onResponse(model); } }); @@ -184,6 +202,7 @@ public ElasticsearchModel parsePersistedConfigWithSecrets( @Override public ElasticsearchModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMap(config, ModelConfigurations.TASK_SETTINGS); String modelId = (String) serviceSettingsMap.get(MODEL_ID); if (modelId == null) { @@ -198,14 +217,12 @@ public ElasticsearchModel parsePersistedConfig(String inferenceEntityId, TaskTyp (MultilingualE5SmallInternalServiceSettings) MultilingualE5SmallInternalServiceSettings.fromMap(serviceSettingsMap).build() ); } else { - return new CustomElandModel( - inferenceEntityId, - taskType, - name(), - (CustomElandInternalServiceSettings) CustomElandInternalServiceSettings.fromMap(serviceSettingsMap).build() - ); - } + var serviceSettings = (CustomElandInternalServiceSettings) CustomElandInternalServiceSettings.fromMap(serviceSettingsMap) + .build(); + var taskSettings = CustomElandModel.taskSettingsFromMap(taskType, taskSettingsMap); + return CustomElandModel.build(inferenceEntityId, taskType, name(), serviceSettings, taskSettings); + } } @Override @@ -218,13 +235,23 @@ public void infer( TimeValue timeout, ActionListener listener ) { - try { - checkCompatibleTaskType(model.getConfigurations().getTaskType()); - } catch (Exception e) { - listener.onFailure(e); - return; + var taskType = model.getConfigurations().getTaskType(); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + inferTextEmbedding(model, input, inputType, timeout, 
listener); + } else if (TaskType.RERANK.equals(taskType)) { + inferRerank(model, query, input, timeout, taskSettings, listener); + } else { + throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); } + } + public void inferTextEmbedding( + Model model, + List<String> input, + InputType inputType, + TimeValue timeout, + ActionListener<InferenceServiceResults> listener + ) { var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getInferenceEntityId(), TextEmbeddingConfigUpdate.EMPTY_INSTANCE, @@ -239,6 +266,37 @@ public void infer( ); } + public void inferRerank( + Model model, + String query, + List<String> inputs, + TimeValue timeout, + Map<String, Object> requestTaskSettings, + ActionListener<InferenceServiceResults> listener + ) { + var config = new TextSimilarityConfigUpdate(query); + var request = InferTrainedModelDeploymentAction.Request.forTextInput( + model.getConfigurations().getInferenceEntityId(), + config, + inputs, + timeout + ); + + var modelSettings = (CustomElandRerankTaskSettings) model.getTaskSettings(); + var requestSettings = CustomElandRerankTaskSettings.fromMap(requestTaskSettings); + Boolean returnDocs = CustomElandRerankTaskSettings.of(modelSettings, requestSettings).returnDocuments(); + + Function<Integer, String> inputSupplier = returnDocs == Boolean.TRUE ? inputs::get : i -> null; + + client.execute( + InferTrainedModelDeploymentAction.INSTANCE, + request, + listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(textSimilarityResultsToRankedDocs(inferenceResult.getResults(), inputSupplier)) + ) + ); + } + public void chunkedInfer( Model model, List<String> input, @@ -262,10 +320,10 @@ public void chunkedInfer( TimeValue timeout, ActionListener<List<ChunkedInferenceServiceResults>> listener ) { - try { - checkCompatibleTaskType(model.getConfigurations().getTaskType()); - } catch (Exception e) { - listener.onFailure(e); + if (TaskType.TEXT_EMBEDDING.isAnyOrSame(model.getTaskType()) == false) { + listener.onFailure( + new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME), RestStatus.BAD_REQUEST) + ); return; } @@ -315,7 +373,7 @@ public void start(Model model, ActionListener<Boolean> listener) { return; } - if (model.getConfigurations().getTaskType() != TaskType.TEXT_EMBEDDING) { + if (model.getTaskType() != TaskType.TEXT_EMBEDDING && model.getTaskType() != TaskType.RERANK) { listener.onFailure( new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME)) ); @@ -364,7 +422,7 @@ public void putModel(Model model, ActionListener<Boolean> listener) { } }) ); - } else if (model instanceof CustomElandModel elandModel) { + } else if (model instanceof CustomElandModel) { logger.info("Custom eland model detected, model must have been already loaded into the cluster with eland."); listener.onResponse(Boolean.TRUE); } else { @@ -412,12 +470,6 @@ private static IllegalStateException notTextEmbeddingModelException(Model model) ); } - private void checkCompatibleTaskType(TaskType taskType) { - if (TaskType.TEXT_EMBEDDING.isAnyOrSame(taskType) == false) { - throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); - } - } - @Override public boolean isInClusterService() { return true; @@ -448,4 +500,36 @@ private static String selectDefaultModelVariantBasedOnClusterArchitecture(Set<String> platformArchitectures) { + + private RankedDocsResults textSimilarityResultsToRankedDocs( + List<? extends InferenceResults> results, + Function<Integer, String> inputSupplier + ) { + List<RankedDocsResults.RankedDoc> rankings = new ArrayList<>(results.size()); + for (int i = 0; i < results.size(); i++) { + var result = results.get(i); + if (result instanceof
org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults similarity) { + rankings.add(new RankedDocsResults.RankedDoc(i, (float) similarity.score(), inputSupplier.apply(i))); + } else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) { + if (errorResult.getException() instanceof ElasticsearchStatusException statusException) { + throw statusException; + } else { + throw new ElasticsearchStatusException( + "Received error inference result.", + RestStatus.INTERNAL_SERVER_ERROR, + errorResult.getException() + ); + } + } else { + throw new IllegalArgumentException( + "Received invalid inference result, of type " + + result.getClass().getName() + + " but expected TextSimilarityInferenceResults." + ); + } + } + + Collections.sort(rankings); + return new RankedDocsResults(rankings); + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index f6458b48f99fc..a384dfe9a2c90 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -8,13 +8,12 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class ElasticsearchInternalServiceSettings extends InternalServiceSettings { public static final String NAME = "text_embedding_internal_service_settings"; @@ -34,7 +33,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + return TransportVersions.V_8_13_0; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java index 954469537a4cc..dc6561ba992fe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; @@ -25,6 +26,16 @@ public ElasticsearchModel( super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings)); } + public ElasticsearchModel( + String inferenceEntityId, + TaskType taskType, + String service, + ElasticsearchInternalServiceSettings serviceSettings, 
+ TaskSettings taskSettings + ) { + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings)); + } + @Override public ElasticsearchInternalServiceSettings getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 3347917bab2b5..d514ca6a917d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.TransportVersion; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,8 +20,6 @@ import java.util.Arrays; import java.util.Map; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -104,11 +101,6 @@ public String getWriteableName() { return MultilingualE5SmallInternalServiceSettings.NAME; } - @Override - public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 690e8f0ddd947..fc479009d3334 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -177,14 +177,11 @@ public OpenAiEmbeddingsServiceSettings(StreamInput in) throws IOException { maxInputTokens = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_DIMENSIONS_SET_BY_USER_ADDED)) { - dimensionsSetByUser = in.readBoolean(); - } else { - dimensionsSetByUser = false; - } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + dimensionsSetByUser = in.readBoolean(); modelId = in.readString(); } else { + dimensionsSetByUser = false; modelId = "unset"; } @@ -310,10 +307,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(maxInputTokens); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_DIMENSIONS_SET_BY_USER_ADDED)) { - out.writeBoolean(dimensionsSetByUser); - } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + out.writeBoolean(dimensionsSetByUser); out.writeString(modelId); } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java new file mode 100644 index 0000000000000..c87faa2b52cc8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -0,0 +1,386 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action.filter; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkItemRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.inference.model.TestModel; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; +import org.mockito.stubbing.Answer; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; +import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.DEFAULT_BATCH_SIZE; +import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.getIndexRequestOrNull; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticText; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSparseEmbeddings; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.toChunkedResult; +import static 
org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ShardBulkInferenceActionFilterTests extends ESTestCase { + private ThreadPool threadPool; + + @Before + public void setupThreadPool() { + threadPool = new TestThreadPool(getTestName()); + } + + @After + public void tearDownThreadPool() throws Exception { + terminate(threadPool); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testFilterNoop() throws Exception { + ShardBulkInferenceActionFilter filter = createFilter(threadPool, Map.of(), DEFAULT_BATCH_SIZE); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + assertNull(((BulkShardRequest) request).getInferenceFieldMap()); + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + BulkShardRequest request = new BulkShardRequest( + new ShardId("test", "test", 0), + WriteRequest.RefreshPolicy.NONE, + new BulkItemRequest[0] + ); + request.setInferenceFieldMap( + Map.of("foo", new InferenceFieldMetadata("foo", "bar", generateRandomStringArray(5, 10, false, false))) + ); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testInferenceNotFound() throws Exception { + StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( + threadPool, + Map.of(model.getInferenceEntityId(), model), + randomIntBetween(1, 10) + ); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + for (BulkItemRequest item : bulkShardRequest.items()) { + assertNotNull(item.getPrimaryResponse()); + assertTrue(item.getPrimaryResponse().isFailed()); + BulkItemResponse.Failure failure = item.getPrimaryResponse().getFailure(); + assertThat(failure.getStatus(), equalTo(RestStatus.NOT_FOUND)); + } + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + + Map inferenceFieldMap = Map.of( + "field1", + new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }), + "field2", + new InferenceFieldMetadata("field2", "inference_0", new String[] { "field2" }), + "field3", + new InferenceFieldMetadata("field3", "inference_0", new String[] { "field3" }) + ); + BulkItemRequest[] items = new BulkItemRequest[10]; + for (int i = 0; i < items.length; i++) { + items[i] = randomBulkItemRequest(Map.of(), inferenceFieldMap)[0]; + } + BulkShardRequest request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); + request.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testItemFailures() 
throws Exception { + StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( + threadPool, + Map.of(model.getInferenceEntityId(), model), + randomIntBetween(1, 10) + ); + model.putResult("I am a failure", new ErrorChunkedInferenceResults(new IllegalArgumentException("boom"))); + model.putResult("I am a success", randomSparseEmbeddings(List.of("I am a success"))); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + assertThat(bulkShardRequest.items().length, equalTo(3)); + + // item 0 is a failure + assertNotNull(bulkShardRequest.items()[0].getPrimaryResponse()); + assertTrue(bulkShardRequest.items()[0].getPrimaryResponse().isFailed()); + BulkItemResponse.Failure failure = bulkShardRequest.items()[0].getPrimaryResponse().getFailure(); + assertThat(failure.getCause().getCause().getMessage(), containsString("boom")); + + // item 1 is a success + assertNull(bulkShardRequest.items()[1].getPrimaryResponse()); + IndexRequest actualRequest = getIndexRequestOrNull(bulkShardRequest.items()[1].request()); + assertThat(XContentMapValues.extractValue("field1.text", actualRequest.sourceAsMap()), equalTo("I am a success")); + + // item 2 is a failure + assertNotNull(bulkShardRequest.items()[2].getPrimaryResponse()); + assertTrue(bulkShardRequest.items()[2].getPrimaryResponse().isFailed()); + failure = bulkShardRequest.items()[2].getPrimaryResponse().getFailure(); + assertThat(failure.getCause().getCause().getMessage(), containsString("boom")); + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + + Map inferenceFieldMap = Map.of( + "field1", + new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }) + ); + BulkItemRequest[] items = new BulkItemRequest[3]; + items[0] = new BulkItemRequest(0, new IndexRequest("index").source("field1", "I am a failure")); + items[1] = new BulkItemRequest(1, new IndexRequest("index").source("field1", "I am a success")); + items[2] = new BulkItemRequest(2, new IndexRequest("index").source("field1", "I am a failure")); + BulkShardRequest request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); + request.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testManyRandomDocs() throws Exception { + Map inferenceModelMap = new HashMap<>(); + int numModels = randomIntBetween(1, 5); + for (int i = 0; i < numModels; i++) { + StaticModel model = StaticModel.createRandomInstance(); + inferenceModelMap.put(model.getInferenceEntityId(), model); + } + + int numInferenceFields = randomIntBetween(1, 5); + Map inferenceFieldMap = new HashMap<>(); + for (int i = 0; i < numInferenceFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + String inferenceId = randomFrom(inferenceModelMap.keySet()); + inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field })); + } + + int numRequests = randomIntBetween(100, 1000); + BulkItemRequest[] originalRequests = new BulkItemRequest[numRequests]; + 
BulkItemRequest[] modifiedRequests = new BulkItemRequest[numRequests]; + for (int id = 0; id < numRequests; id++) { + BulkItemRequest[] res = randomBulkItemRequest(inferenceModelMap, inferenceFieldMap); + originalRequests[id] = res[0]; + modifiedRequests[id] = res[1]; + } + + ShardBulkInferenceActionFilter filter = createFilter(threadPool, inferenceModelMap, randomIntBetween(10, 30)); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + assertThat(request, instanceOf(BulkShardRequest.class)); + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + BulkItemRequest[] items = bulkShardRequest.items(); + assertThat(items.length, equalTo(originalRequests.length)); + for (int id = 0; id < items.length; id++) { + IndexRequest actualRequest = getIndexRequestOrNull(items[id].request()); + IndexRequest expectedRequest = getIndexRequestOrNull(modifiedRequests[id].request()); + try { + assertToXContentEquivalent(expectedRequest.source(), actualRequest.source(), expectedRequest.getContentType()); + } catch (Exception exc) { + throw new IllegalStateException(exc); + } + } + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + BulkShardRequest original = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, originalRequests); + original.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, original, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings("unchecked") + private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool, Map<String, StaticModel> modelMap, int batchSize) { + ModelRegistry modelRegistry = mock(ModelRegistry.class); + Answer unparsedModelAnswer = invocationOnMock -> { + String id = (String) invocationOnMock.getArguments()[0]; + ActionListener<ModelRegistry.UnparsedModel> listener = (ActionListener<ModelRegistry.UnparsedModel>) invocationOnMock + .getArguments()[1]; + var model = modelMap.get(id); + if (model != null) { + listener.onResponse( + new ModelRegistry.UnparsedModel( + model.getInferenceEntityId(), + model.getTaskType(), + model.getServiceSettings().model(), + XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(model.getTaskSettings()), false), + XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(model.getSecretSettings()), false) + ) + ); + } else { + listener.onFailure(new ResourceNotFoundException("model id [{}] not found", id)); + } + return null; + }; + doAnswer(unparsedModelAnswer).when(modelRegistry).getModelWithSecrets(any(), any()); + + InferenceService inferenceService = mock(InferenceService.class); + Answer chunkedInferAnswer = invocationOnMock -> { + StaticModel model = (StaticModel) invocationOnMock.getArguments()[0]; + List<String> inputs = (List<String>) invocationOnMock.getArguments()[2]; + ActionListener<List<ChunkedInferenceServiceResults>> listener = (ActionListener< + List<ChunkedInferenceServiceResults>>) invocationOnMock.getArguments()[7]; + Runnable runnable = () -> { + List<ChunkedInferenceServiceResults> results = new ArrayList<>(); + for (String input : inputs) { + results.add(model.getResults(input)); + } + listener.onResponse(results); + }; + if (randomBoolean()) { + try { + threadPool.generic().execute(runnable); + } catch (Exception exc) { + listener.onFailure(exc); + } + } else { + runnable.run(); + } + return null; + }; + doAnswer(chunkedInferAnswer).when(inferenceService).chunkedInfer(any(),
any(), any(), any(), any(), any(), any()); + + Answer modelAnswer = invocationOnMock -> { + String inferenceId = (String) invocationOnMock.getArguments()[0]; + return modelMap.get(inferenceId); + }; + doAnswer(modelAnswer).when(inferenceService).parsePersistedConfigWithSecrets(any(), any(), any(), any()); + + InferenceServiceRegistry inferenceServiceRegistry = mock(InferenceServiceRegistry.class); + when(inferenceServiceRegistry.getService(any())).thenReturn(Optional.of(inferenceService)); + ShardBulkInferenceActionFilter filter = new ShardBulkInferenceActionFilter(inferenceServiceRegistry, modelRegistry, batchSize); + return filter; + } + + private static BulkItemRequest[] randomBulkItemRequest( + Map<String, StaticModel> modelMap, + Map<String, InferenceFieldMetadata> fieldInferenceMap + ) { + Map<String, Object> docMap = new LinkedHashMap<>(); + Map<String, Object> expectedDocMap = new LinkedHashMap<>(); + XContentType requestContentType = randomFrom(XContentType.values()); + for (var entry : fieldInferenceMap.values()) { + String field = entry.getName(); + var model = modelMap.get(entry.getInferenceId()); + String text = randomAlphaOfLengthBetween(10, 100); + docMap.put(field, text); + expectedDocMap.put(field, text); + if (model == null) { + // ignore results, the doc should fail with a resource not found exception + continue; + } + var result = randomSemanticText(field, model, List.of(text), requestContentType); + model.putResult(text, toChunkedResult(result)); + expectedDocMap.put(field, result); + } + + int requestId = randomIntBetween(0, Integer.MAX_VALUE); + return new BulkItemRequest[] { + new BulkItemRequest(requestId, new IndexRequest("index").source(docMap, requestContentType)), + new BulkItemRequest(requestId, new IndexRequest("index").source(expectedDocMap, requestContentType)) }; + } + + private static class StaticModel extends TestModel { + private final Map<String, ChunkedInferenceServiceResults> resultMap; + + StaticModel( + String inferenceEntityId, + TaskType taskType, + String service, + TestServiceSettings serviceSettings, + TestTaskSettings taskSettings, + TestSecretSettings secretSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, taskSettings, secretSettings); + this.resultMap = new HashMap<>(); + } + + public static StaticModel createRandomInstance() { + TestModel testModel = TestModel.createRandomInstance(); + return new StaticModel( + testModel.getInferenceEntityId(), + testModel.getTaskType(), + randomAlphaOfLength(10), + testModel.getServiceSettings(), + testModel.getTaskSettings(), + testModel.getSecretSettings() + ); + } + + ChunkedInferenceServiceResults getResults(String text) { + return resultMap.getOrDefault(text, new ChunkedSparseEmbeddingResults(List.of())); + } + + void putResult(String text, ChunkedInferenceServiceResults result) { + resultMap.put(text, result); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java index 129b39a2f7b33..567e26101283e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import
org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -45,6 +46,7 @@ import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests.createModel; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; @@ -54,6 +56,11 @@ public class AzureOpenAiActionCreatorTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private static final Settings ZERO_TIMEOUT_SETTINGS = buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ); private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; private HttpClientManager clientManager; @@ -116,7 +123,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -166,7 +173,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), null); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), null); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -174,12 +181,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() throws IOException { // timeout as zero for no retries - var settings = buildSettingsWithRetryFields( - TimeValue.timeValueMillis(1), - TimeValue.timeValueMinutes(1), - TimeValue.timeValueSeconds(0) - ); - var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); try (var sender = senderFactory.createSender("test_service")) { sender.start(); @@ -226,7 +228,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abc"), 
"overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abc"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -295,13 +297,13 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); } { validateRequestWithApiKey(webServer.requests().get(1), "apikey"); var requestMap = entityAsMap(webServer.requests().get(1).getBody()); - validateRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); } } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -371,13 +373,13 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("abcd"), "overridden_user"); } { validateRequestWithApiKey(webServer.requests().get(1), "apikey"); var requestMap = entityAsMap(webServer.requests().get(1).getBody()); - validateRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("ab"), "overridden_user"); } } catch (URISyntaxException e) { throw new RuntimeException(e); @@ -429,13 +431,186 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { validateRequestWithApiKey(webServer.requests().get(0), "apikey"); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - validateRequestMapWithUser(requestMap, List.of("sup"), "overridden_user"); + validateEmbeddingsRequestMapWithUser(requestMap, List.of("sup"), "overridden_user"); } catch (URISyntaxException e) { throw new RuntimeException(e); } } - private void validateRequestMapWithUser(Map requestMap, List input, @Nullable String user) { + public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var originalUser = "original_user"; + var overriddenUser = "overridden_user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var model = createCompletionModel("resource", "deployment", "apiversion", originalUser, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var taskSettingsWithUserOverride = createRequestTaskSettingsMap(overriddenUser); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, taskSettingsWithUserOverride); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new 
DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), overriddenUser); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(null); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), null); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // "choices" missing + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + var userOverride = "overridden_user"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var 
taskSettingsWithUserOverride = createRequestTaskSettingsMap(userOverride); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, taskSettingsWithUserOverride); + + PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in Azure OpenAI completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + validateRequestWithApiKey(webServer.requests().get(0), apiKey); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), userOverride); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + private void validateEmbeddingsRequestMapWithUser(Map<String, Object> requestMap, List<String> input, @Nullable String user) { var expectedSize = user == null ? 1 : 2; assertThat(requestMap.size(), is(expectedSize)); @@ -446,6 +621,24 @@ private void validateRequestMapWithUser(Map<String, Object> requestMap, List<St + private void validateCompletionRequestMapWithUser(Map<String, Object> requestMap, List<String> input, @Nullable String user) { + assertThat("input for completions can only be of size 1", input.size(), equalTo(1)); + + var expectedSize = user == null ? 2 : 3; + + assertThat(requestMap.size(), is(expectedSize)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input.get(0))); + + if (user != null) { + assertThat(requestMap.get("user"), is(user)); + } + } + + @SuppressWarnings("unchecked") + public static String getContentOfMessageInRequestMap(Map<String, Object> requestMap) { + return ((Map<String, Object>) ((List<Object>) requestMap.get("messages")).get(0)).get("content").toString(); + } + private void validateRequestWithApiKey(MockRequest request, String apiKey) { assertNull(request.getUri().getQuery()); assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java new file mode 100644 index 0000000000000..96127841c17a8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = 
""" + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var user = "user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var action = createAction("resource", "deployment", "apiversion", user, apiKey, sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), is(XContentType.JSON.mediaType())); + assertThat(request.getHeader(AzureOpenAiUtils.API_KEY_HEADER), is(apiKey)); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(completionInput)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> 
listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + private AzureOpenAiCompletionAction createAction( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable String user, + String apiKey, + Sender sender, + String inferenceEntityId + ) { + try { + var model = createCompletionModel(resourceName, deploymentId, apiVersion, user, apiKey, null, inferenceEntityId); + model.setUri(new URI(getUrl(webServer))); + return new AzureOpenAiCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java new file mode 100644 index 0000000000000..2d37f273e1de2 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiRequest.MISSING_AUTHENTICATION_ERROR_MESSAGE; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AzureOpenAiRequestTests extends ESTestCase { + + public void testDecorateWithAuthHeader_apiKeyPresent() { + var apiKey = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(apiKey, null); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var apiKeyHeader = httpPost.getFirstHeader(API_KEY_HEADER); + + assertThat(apiKeyHeader.getValue(), equalTo(apiKey.toString())); + } + + public void testDecorateWithAuthHeader_entraIdPresent() { + var entraId = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(null, entraId); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var authHeader = httpPost.getFirstHeader(HttpHeaders.AUTHORIZATION); + + assertThat(authHeader.getValue(), equalTo("Bearer " + entraId)); + } + + public void testDecorateWithAuthHeader_entraIdAndApiKeyMissing_throwMissingAuthValidationException() { + var httpPost = new HttpPost(); + var secretSettingsMock = 
mock(AzureOpenAiSecretSettings.class); + + when(secretSettingsMock.entraId()).thenReturn(null); + when(secretSettingsMock.apiKey()).thenReturn(null); + + ValidationException exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettingsMock) + ); + assertTrue(exception.getMessage().contains(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..7647a4983f4be --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequestEntity; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class AzureOpenAiCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesSingleMessage_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1}""")); + } + + public void testXContent_WritesSingleMessage_WritesUserWhenItIsNotNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1,"user":"user"}""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java new file mode 100644 index 0000000000000..048d4ea16d56f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + protected AzureOpenAiCompletionRequest createRequest( + String resource, + String deployment, + String apiVersion, + String apiKey, + String entraId, + String input, + String user + ) { + var completionModel = AzureOpenAiCompletionModelTests.createCompletionModel( + resource, + deployment, + apiVersion, + user, + apiKey, + entraId, 
+ "id" + ); + + return new AzureOpenAiCompletionRequest(List.of(input), completionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java similarity index 96% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java index 14283ed53eed9..f732a01c893e8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java @@ -5,13 +5,14 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequestEntity; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java similarity index 73% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java index 88e6880b72f0b..bbd8a49d65f46 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; @@ -14,56 +14,69 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class AzureOpenAiEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_WithApiKeyDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", "apikey", null, "abc", "user"); + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", "apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is("apikey")); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } - public void testCreateRequest_WithEntraIdDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", null, "entraId", "abc", "user"); + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = 
AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", "apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer entraId")); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { @@ -87,7 +100,7 @@ public void testIsTruncated_ReturnsTrue() { assertTrue(truncatedRequest.getTruncationInfo()[0]); } - public static AzureOpenAiEmbeddingsRequest createRequest( + public AzureOpenAiEmbeddingsRequest createRequest( String resourceName, String deploymentId, String apiVersion, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java new file mode 100644 index 0000000000000..47aff8dad65db --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
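The CohereUtilsTests that follow pin a fixed request-attribution header. A plausible shape for the helper under test, inferred only from the assertions below rather than copied from the production class:

    import org.apache.http.Header;
    import org.apache.http.message.BasicHeader;

    final class CohereHeaderSketch {
        static Header createRequestSourceHeader() {
            // Cohere uses this header to attribute traffic to an integration.
            return new BasicHeader("Request-Source", "unspecified:elasticsearch");
        }
    }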
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class CohereUtilsTests extends ESTestCase { + + public void testCreateRequestSourceHeader() { + var requestSourceHeader = CohereUtils.createRequestSourceHeader(); + + assertThat(requestSourceHeader.getName(), is("Request-Source")); + assertThat(requestSourceHeader.getValue(), is("unspecified:elasticsearch")); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index 4f7cd9ea89a14..897c648eb942f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -106,6 +106,24 @@ public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws } } + public void testPositionParserAtTokenAfterField_ConsumesUntilEnd() throws IOException { + var json = """ + { + "key": { + "foo": "bar" + }, + "target": "value" + } + """; + + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "target", errorFormat); + assertEquals("value", parser.text()); + } + } + public void testConsumeUntilObjectEnd() throws IOException { var json = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java new file mode 100644 index 0000000000000..ec76f43a6d52f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
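The new XContentUtilsTests case above checks that the helper skips the earlier nested object to reach "target". One way such a positioning helper can be written; this is an illustrative reimplementation under that assumption, not the production XContentUtils code:

    import java.io.IOException;
    import java.util.Locale;
    import org.elasticsearch.xcontent.XContentParser;

    final class ParserPositioningSketch {
        static void positionAfterField(XContentParser parser, String field, String errorFormat) throws IOException {
            for (XContentParser.Token token = parser.nextToken(); token != null; token = parser.nextToken()) {
                if (token == XContentParser.Token.FIELD_NAME && field.equals(parser.currentName())) {
                    parser.nextToken(); // step onto the field's value token
                    return;
                }
            }
            throw new IllegalStateException(String.format(Locale.ROOT, errorFormat, field));
        }
    }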
+ */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "choices": [ + { + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "result", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion", + "prompt_filter_results": [ + { + "prompt_index": 0, + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + } + } + ], + "usage": { + "completion_tokens": 138, + "prompt_tokens": 11, + "total_tokens": 149 + } + }"""; + + ChatCompletionResults chatCompletionResults = AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotAnArray() { + String responseJson = """ + { + "choices": { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + }, + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() 
{ + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "not_message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": "string" + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 18f702014e2d8..5604d6573144e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -35,7 +34,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { "index": 0, "message": { "role": "assistant", - "content": "some content" + "content": "result" }, "logprobs": null, "finish_reason": "stop" @@ -55,7 +54,8 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { @@ -74,7 +74,7 @@ public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, @@ -112,7 +112,7 @@ public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { }, "logprobs": null, "finish_reason": "stop" - }, + } }, "usage": { "prompt_tokens": 46, @@ -153,7 +153,7 @@ public void testFromResponse_FailsWhenMessageDoesNotExist() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java 
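The response-entity tests above pin both the happy path (choices[0].message.content) and the exact failure messages. A compact sketch of that parse walk; findField is a local stand-in for the scoped lookups the production parser uses:

    import java.io.IOException;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentParserConfiguration;
    import org.elasticsearch.xcontent.XContentType;

    final class CompletionResponseSketch {
        // Walks to choices[0].message.content, as the happy-path assertion implies.
        static String firstCompletionContent(String json) throws IOException {
            try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) {
                findField(parser, "choices");   // parser now sits on START_ARRAY
                parser.nextToken();             // first element: START_OBJECT
                findField(parser, "message");   // production code scopes this lookup
                findField(parser, "content");
                return parser.text();
            }
        }

        // Stand-in lookup; the IllegalStateException mirrors the
        // "Failed to find required field" messages asserted above.
        private static void findField(XContentParser parser, String name) throws IOException {
            for (XContentParser.Token t = parser.nextToken(); t != null; t = parser.nextToken()) {
                if (t == XContentParser.Token.FIELD_NAME && name.equals(parser.currentName())) {
                    parser.nextToken();
                    return;
                }
            }
            throw new IllegalStateException("Failed to find required field [" + name + "]");
        }
    }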
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 26f6e5b7e694a..7d0d076a0a22c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -31,6 +31,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalTimeValue; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; @@ -71,6 +72,21 @@ public void testRemoveAsTypeWithTheCorrectType() { assertThat(map.entrySet(), empty()); } + public void testRemoveAsType_Validation_WithTheCorrectType() { + Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 1.0)); + + ValidationException validationException = new ValidationException(); + Integer i = ServiceUtils.removeAsType(map, "a", Integer.class, validationException); + assertEquals(Integer.valueOf(5), i); + assertNull(map.get("a")); // field has been removed + assertThat(validationException.validationErrors(), empty()); + + String str = ServiceUtils.removeAsType(map, "b", String.class, validationException); + assertEquals("a string", str); + assertNull(map.get("b")); + assertThat(validationException.validationErrors(), empty()); + } + public void testRemoveAsTypeWithInCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); @@ -113,6 +129,62 @@ public void testRemoveAsTypeWithInCorrectType() { assertThat(map.entrySet(), empty()); } + public void testRemoveAsType_Validation_WithInCorrectType() { + Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); + + var validationException = new ValidationException(); + Object result = ServiceUtils.removeAsType(map, "a", String.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [a] is not of the expected type. The value [5] cannot be converted to a [String]") + ); + + validationException = new ValidationException(); + ServiceUtils.removeAsType(map, "b", Boolean.class, validationException); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [b] is not of the expected type. The value [a string] cannot be converted to a [Boolean]") + ); + assertNull(map.get("b")); + + validationException = new ValidationException(); + result = ServiceUtils.removeAsType(map, "c", Integer.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [c] is not of the expected type. 
The value [true] cannot be converted to a [Integer]") + ); + assertNull(map.get("c")); + + // cannot convert double to integer + validationException = new ValidationException(); + result = ServiceUtils.removeAsType(map, "d", Integer.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [d] is not of the expected type. The value [5.0] cannot be converted to a [Integer]") + ); + assertNull(map.get("d")); + + // cannot convert integer to double + validationException = new ValidationException(); + result = ServiceUtils.removeAsType(map, "e", Double.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [e] is not of the expected type. The value [5] cannot be converted to a [Double]") + ); + assertNull(map.get("e")); + + assertThat(map.entrySet(), empty()); + } + public void testRemoveAsTypeMissingReturnsNull() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE)); assertNull(ServiceUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); @@ -197,10 +269,11 @@ public void testExtractRequiredSecureString_AddsException_WhenFieldIsEmpty() { public void testExtractRequiredString_CreatesString() { var validation = new ValidationException(); + validation.addValidationError("previous error"); Map map = modifiableMap(Map.of("key", "value")); var createdString = extractRequiredString(map, "key", "scope", validation); - assertTrue(validation.validationErrors().isEmpty()); + assertThat(validation.validationErrors(), hasSize(1)); assertNotNull(createdString); assertThat(createdString, is("value")); assertTrue(map.isEmpty()); @@ -208,24 +281,27 @@ public void testExtractRequiredString_CreatesString() { public void testExtractRequiredString_AddsException_WhenFieldDoesNotExist() { var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("key", "value")); var createdString = extractRequiredSecureString(map, "abc", "scope", validation); assertNull(createdString); - assertFalse(validation.validationErrors().isEmpty()); + assertThat(validation.validationErrors(), hasSize(2)); assertThat(map.size(), is(1)); - assertThat(validation.validationErrors().get(0), is("[scope] does not contain the required setting [abc]")); + assertThat(validation.validationErrors().get(1), is("[scope] does not contain the required setting [abc]")); } public void testExtractRequiredString_AddsException_WhenFieldIsEmpty() { var validation = new ValidationException(); + validation.addValidationError("previous error"); Map map = modifiableMap(Map.of("key", "")); var createdString = extractOptionalString(map, "key", "scope", validation); assertNull(createdString); assertFalse(validation.validationErrors().isEmpty()); assertTrue(map.isEmpty()); - assertThat(validation.validationErrors().get(0), is("[scope] Invalid value empty string. [key] must be a non-empty string")); + assertThat(validation.validationErrors().get(1), is("[scope] Invalid value empty string.
[key] must be a non-empty string")); } public void testExtractOptionalString_CreatesString() { @@ -241,11 +317,12 @@ public void testExtractOptionalString_CreatesString() { public void testExtractOptionalString_DoesNotAddException_WhenFieldDoesNotExist() { var validation = new ValidationException(); + validation.addValidationError("previous error"); Map map = modifiableMap(Map.of("key", "value")); var createdString = extractOptionalString(map, "abc", "scope", validation); assertNull(createdString); - assertTrue(validation.validationErrors().isEmpty()); + assertThat(validation.validationErrors(), hasSize(1)); assertThat(map.size(), is(1)); } @@ -260,6 +337,14 @@ public void testExtractOptionalString_AddsException_WhenFieldIsEmpty() { assertThat(validation.validationErrors().get(0), is("[scope] Invalid value empty string. [key] must be a non-empty string")); } + public void testExtractOptionalPositiveInt() { + var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("abc", 1)); + assertEquals(Integer.valueOf(1), extractOptionalPositiveInteger(map, "abc", "scope", validation)); + assertThat(validation.validationErrors(), hasSize(1)); + } + public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { var validation = new ValidationException(); Map map = modifiableMap(Map.of("key", "value")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java new file mode 100644 index 0000000000000..93d948a5bdcf3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
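The ServiceUtilsTests changes above encode two contracts worth spelling out: removeAsType always consumes the key and records (rather than throws) type mismatches, and the extract* helpers append to a shared ValidationException so pre-existing errors survive. A sketch of both under those assumptions; everything in this class is illustrative, and the positive-integer error wording is a guess since the tests above only cover the valid case:

    import java.util.Map;
    import org.elasticsearch.common.ValidationException;

    final class ServiceUtilsSketch {
        @SuppressWarnings("unchecked")
        static <T> T removeAsType(Map<String, Object> map, String key, Class<T> type, ValidationException errors) {
            Object value = map.remove(key); // consumed even when the type check fails
            if (value == null) {
                return null;
            }
            if (type.isInstance(value)) {
                return (T) value;
            }
            errors.addValidationError(
                "field [" + key + "] is not of the expected type. The value [" + value + "] cannot be converted to a [" + type.getSimpleName() + "]"
            );
            return null;
        }

        static Integer extractOptionalPositiveInteger(Map<String, Object> map, String key, String scope, ValidationException errors) {
            Integer value = removeAsType(map, key, Integer.class, errors);
            if (value != null && value <= 0) {
                errors.addValidationError("[" + scope + "] Invalid value [" + value + "]. [" + key + "] must be a positive integer");
                return null;
            }
            return value;
        }
    }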
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class AzureOpenAiCompletionModelTests extends ESTestCase { + + public void testOverrideWith_UpdatedTaskSettings_OverridesUser() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiVersion = "api version"; + var apiKey = "api key"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var user = "user"; + var userOverride = "user override"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var requestTaskSettingsMap = taskSettingsMap(userOverride); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat( + overriddenModel, + equalTo(createCompletionModel(resource, deploymentId, apiVersion, userOverride, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testOverrideWith_EmptyMap_OverridesNothing() { + var model = createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var requestTaskSettingsMap = Map.of(); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap_OverridesNothing() { + var model = createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var overriddenModel = AzureOpenAiCompletionModel.of(model, null); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_UpdatedServiceSettings_OverridesApiVersion() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var apiVersion = "api version"; + var updatedApiVersion = "updated api version"; + + var updatedServiceSettings = new AzureOpenAiCompletionServiceSettings(resource, deploymentId, updatedApiVersion, null); + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var overriddenModel = new AzureOpenAiCompletionModel(model, updatedServiceSettings); + + assertThat( + overriddenModel, + is(createCompletionModel(resource, deploymentId, updatedApiVersion, user, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + model.buildUriString().toString(), + 
is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + } + + public static AzureOpenAiCompletionModel createModelWithRandomValues() { + return createCompletionModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + + public static AzureOpenAiCompletionModel createCompletionModel( + String resourceName, + String deploymentId, + String apiVersion, + String user, + @Nullable String apiKey, + @Nullable String entraId, + String inferenceEntityId + ) { + var secureApiKey = apiKey != null ? new SecureString(apiKey.toCharArray()) : null; + var secureEntraId = entraId != null ? new SecureString(entraId.toCharArray()) : null; + + return new AzureOpenAiCompletionModel( + inferenceEntityId, + TaskType.COMPLETION, + "service", + new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null), + new AzureOpenAiCompletionTaskSettings(user), + new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) + ); + } + + private Map taskSettingsMap(String user) { + Map taskSettingsMap = new HashMap<>(); + taskSettingsMap.put(AzureOpenAiServiceFields.USER, user); + return taskSettingsMap; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..51963c275a08a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenMapIsEmpty() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsEmptySettings_WhenMapDoesNotContainKnownFields() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsUser() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + assertThat(settings.user(), is("user")); + } + + public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { + var exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..cbaa41c37958d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
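The request-task-settings tests above fix three behaviours: an empty map yields EMPTY_SETTINGS, unknown keys are ignored, and an empty user is a validation failure. A record-based sketch consistent with those assertions; the production class may be shaped differently, and the record name is illustrative:

    import java.util.Map;
    import org.elasticsearch.common.ValidationException;
    import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString;

    record CompletionRequestTaskSettingsSketch(String user) {
        static final CompletionRequestTaskSettingsSketch EMPTY_SETTINGS = new CompletionRequestTaskSettingsSketch(null);

        static CompletionRequestTaskSettingsSketch fromMap(Map<String, Object> map) {
            if (map == null || map.isEmpty()) {
                return EMPTY_SETTINGS;
            }
            var validationException = new ValidationException();
            String user = extractOptionalString(map, "user", "task_settings", validationException);
            if (validationException.validationErrors().isEmpty() == false) {
                throw validationException;
            }
            // record equality makes a null user equivalent to EMPTY_SETTINGS
            return user == null ? EMPTY_SETTINGS : new CompletionRequestTaskSettingsSketch(user);
        }
    }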
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + private static AzureOpenAiCompletionServiceSettings createRandom() { + var resourceName = randomAlphaOfLength(8); + var deploymentId = randomAlphaOfLength(8); + var apiVersion = randomAlphaOfLength(8); + + return new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null); + } + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var resourceName = "this-resource"; + var deploymentId = "this-deployment"; + var apiVersion = "2024-01-01"; + + var serviceSettings = AzureOpenAiCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + AzureOpenAiServiceFields.RESOURCE_NAME, + resourceName, + AzureOpenAiServiceFields.DEPLOYMENT_ID, + deploymentId, + AzureOpenAiServiceFields.API_VERSION, + apiVersion + ) + ) + ); + + assertThat(serviceSettings, is(new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null))); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024","rate_limit":{"requests_per_minute":120}}""")); + } + + public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return AzureOpenAiCompletionServiceSettings::new; + } + + @Override + protected AzureOpenAiCompletionServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AzureOpenAiCompletionServiceSettings mutateInstance(AzureOpenAiCompletionServiceSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..7f0e730b8835c --- /dev/null +++ 
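Two behaviours are pinned around here: the service-settings tests just above show a null rate limit serializing as the default of 120 requests per minute (and getFilteredXContentObject dropping it), while the task-settings tests that follow encode the per-request override rule. That rule reduces to a null-coalescing merge; a sketch under that assumption, using only accessors the tests themselves call:

    import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionRequestTaskSettings;
    import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionTaskSettings;

    final class TaskSettingsOverrideSketch {
        // Request-scoped settings win only when present; otherwise the originals stand.
        static AzureOpenAiCompletionTaskSettings of(
            AzureOpenAiCompletionTaskSettings original,
            AzureOpenAiCompletionRequestTaskSettings toOverride
        ) {
            var user = toOverride.user() == null ? original.user() : toOverride.user();
            return new AzureOpenAiCompletionTaskSettings(user);
        }
    }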
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static AzureOpenAiCompletionTaskSettings createRandomWithUser() { + return new AzureOpenAiCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public static AzureOpenAiCompletionTaskSettings createRandom() { + var user = randomBoolean() ? randomAlphaOfLength(15) : null; + return new AzureOpenAiCompletionTaskSettings(user); + } + + public void testFromMap_WithUser() { + var user = "user"; + + assertThat( + new AzureOpenAiCompletionTaskSettings(user), + is(AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user)))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string.
[user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of( + taskSettings, + AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var user = "user"; + var userOverride = "user override"; + + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user))); + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, userOverride)) + ); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new AzureOpenAiCompletionTaskSettings(userOverride))); + } + + @Override + protected Writeable.Reader instanceReader() { + return AzureOpenAiCompletionTaskSettings::new; + } + + @Override + protected AzureOpenAiCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected AzureOpenAiCompletionTaskSettings mutateInstance(AzureOpenAiCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java index aebc2240983f7..1747155623a98 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import java.net.URISyntaxException; import java.util.Map; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettingsTests.getAzureOpenAiRequestTaskSettingsMap; @@ -65,6 +66,35 @@ public void testCreateModel_FromUpdatedServiceSettings() { assertThat(overridenModel, is(createModel("resource", "deployment", "override_apiversion", "user", "api_key", null, "id"))); } + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); + } + + public static AzureOpenAiEmbeddingsModel createModelWithRandomValues() { + return createModel( + randomAlphaOfLength(10), + 
randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + public static AzureOpenAiEmbeddingsModel createModel( String resourceName, String deploymentId, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 1ac97642f0b85..a306a3e660cd9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -247,7 +246,7 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError_ForPersistent() { public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { var exception = expectThrows( - ElasticsearchStatusException.class, + ValidationException.class, () -> CohereEmbeddingsServiceSettings.fromMap( new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, List.of("abc"))), ConfigurationParseContext.PERSISTENT @@ -256,7 +255,7 @@ public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { MatcherAssert.assertThat( exception.getMessage(), - is("field [embedding_type] is not of the expected type. The value [[abc]] cannot be converted to a [String]") + containsString("field [embedding_type] is not of the expected type. 
The value [[abc]] cannot be converted to a [String]") ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 7212edbb8cf8c..ea11e9d0343e3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -24,15 +24,22 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; @@ -41,6 +48,7 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -59,6 +67,18 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { TaskType taskType = TaskType.TEXT_EMBEDDING; String randomInferenceEntityId = randomAlphaOfLength(10); + private static ThreadPool threadPool; + + @Before + public void setUpThreadPool() { + threadPool = new TestThreadPool("test"); + } + + @After + public void shutdownThreadPool() { + TestThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + } + public void testParseRequestConfig() { // Null model variant @@ -220,6 +240,95 @@ public void testParseRequestConfig() { } } + @SuppressWarnings("unchecked") + public void testParseRequestConfig_Rerank() { + // with task settings + { + var client = mock(Client.class); + doAnswer(invocation -> { + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse( + new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) + ); + return null; + }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); + + when(client.threadPool()).thenReturn(threadPool); + + var service = createService(client); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + 
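+                        // the three service settings every internal Eland model needs:
+                        // allocation count, thread count, and the model id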
ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + var returnDocs = randomBoolean(); + settings.put( + ModelConfigurations.TASK_SETTINGS, + new HashMap<>(Map.of(CustomElandRerankTaskSettings.RETURN_DOCUMENTS, returnDocs)) + ); + + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(CustomElandModel.class)); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertThat(model.getServiceSettings(), instanceOf(ElasticsearchInternalServiceSettings.class)); + assertEquals(returnDocs, ((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + }, e -> { fail("Model parsing failed " + e.getMessage()); }); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + } + + @SuppressWarnings("unchecked") + public void testParseRequestConfig_Rerank_DefaultTaskSettings() { + // without task settings + { + var client = mock(Client.class); + doAnswer(invocation -> { + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse( + new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) + ); + return null; + }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); + + when(client.threadPool()).thenReturn(threadPool); + + var service = createService(client); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(CustomElandModel.class)); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertThat(model.getServiceSettings(), instanceOf(ElasticsearchInternalServiceSettings.class)); + assertEquals(Boolean.TRUE, ((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + }, e -> { fail("Model parsing failed " + e.getMessage()); }); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + } + + private ActionListener getModelVerificationActionListener(MultilingualE5SmallInternalServiceSettings e5ServiceSettings) { return ActionListener.wrap(model -> { assertEquals( @@ -480,6 +589,61 @@ public void testChunkInferSetsTokenization() { } } + public void testParsePersistedConfig_Rerank() { + // with task settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + settings.put(InternalServiceSettings.MODEL_ID, "foo"); + var returnDocs = randomBoolean(); + settings.put( + ModelConfigurations.TASK_SETTINGS, + new HashMap<>(Map.of(CustomElandRerankTaskSettings.RETURN_DOCUMENTS, returnDocs)) + ); + + var model = service.parsePersistedConfig(randomInferenceEntityId, TaskType.RERANK, settings); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertEquals(returnDocs, ((CustomElandRerankTaskSettings)
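+            // the randomized returnDocs set in the task settings must round-trip;
+            // the block below (no task settings) checks the Boolean.TRUE default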
model.getTaskSettings()).returnDocuments()); + } + + // without task settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + settings.put(InternalServiceSettings.MODEL_ID, "foo"); + + var model = service.parsePersistedConfig(randomInferenceEntityId, TaskType.RERANK, settings); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertTrue(((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + } + } + private ElasticsearchInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElasticsearchInternalService(context); diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index a594c577dcdd2..2f6127c44957f 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; @@ -21,9 +22,9 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .setting("xpack.security.enabled", "false") .setting("xpack.security.http.ssl.enabled", "false") - .plugin("x-pack-inference") .plugin("inference-service-test") - .distribution(DistributionType.INTEG_TEST) + .feature(FeatureFlag.SEMANTIC_TEXT_ENABLED) + .distribution(DistributionType.DEFAULT) .build(); public InferenceRestIT(final ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml new file mode 100644 index 0000000000000..067b79aec1fdd --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -0,0 +1,773 @@ +setup: + - skip: + version: " - 8.14.99" + reason: semantic_text introduced in 8.15.0 + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "similarity": "cosine", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + sparse_field: + type: 
semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + +--- +"Calculates text expansion and embedding results for new documents": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Inference fields do not create new mappings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + indices.get_mapping: + index: test-index + + - match: {test-index.mappings.properties.sparse_field.type: semantic_text} + - match: {test-index.mappings.properties.dense_field.type: semantic_text} + - match: {test-index.mappings.properties.non_inference_field.type: text} + - length: {test-index.mappings.properties: 3} + +--- +"Sparse vector results are indexed as nested chunks and searchable": + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{}} + {"sparse_field": ["you know, for testing", "now with chunks"]} + {"index":{}} + {"sparse_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + nested: + path: sparse_field.inference.chunks + query: + text_expansion: + sparse_field.inference.chunks.embeddings: + model_id: sparse-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source.sparse_field.inference.chunks: 2 } + - length: { hits.hits.1._source.sparse_field.inference.chunks: 2 } + + # Search with inner hits + - do: + search: + _source: false + index: test-index + body: + query: + nested: + path: sparse_field.inference.chunks + inner_hits: + _source: false + fields: [sparse_field.inference.chunks.text] + query: + text_expansion: + sparse_field.inference.chunks.embeddings: + model_id: sparse-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.relation: eq } + + - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } + + +--- +"Dense vector results are indexed as nested chunks and searchable": + - do: + bulk: + index: test-index + refresh: true + body: | + 
{"index":{}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + nested: + path: dense_field.inference.chunks + query: + knn: + field: dense_field.inference.chunks.embeddings + query_vector_builder: + text_embedding: + model_id: dense-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source.dense_field.inference.chunks: 2 } + - length: { hits.hits.1._source.dense_field.inference.chunks: 2 } + + # Search with inner hits + - do: + search: + _source: false + index: test-index + body: + query: + nested: + path: dense_field.inference.chunks + inner_hits: + _source: false + fields: [dense_field.inference.chunks.text] + query: + knn: + field: dense_field.inference.chunks.embeddings + query_vector_builder: + text_embedding: + model_id: dense-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.relation: eq } + + - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } + + + +--- +"Updating non semantic_text fields does not recalculate embeddings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } + - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + + - do: + update: + index: test-index + id: doc_1 + body: + doc: + non_inference_field: "another non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } + - match: { _source.non_inference_field: "another non inference test" } + +--- +"Updating semantic_text fields recalculates embeddings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - match: { 
_source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "I am a test" } + - match: { _source.sparse_field.inference.chunks.0.text: "I am a test" } + - match: { _source.dense_field.text: "I am a teapot" } + - match: { _source.dense_field.inference.chunks.0.text: "I am a teapot" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + update: + index: test-index + id: doc_1 + body: + doc: + sparse_field: "updated inference test" + dense_field: "another updated inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "bulk inference test", "dense_field": "bulk updated inference test"}}' + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "bulk inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "bulk inference test" } + - match: { _source.dense_field.text: "bulk updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "bulk updated inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Reindex works for semantic_text fields": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } + - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + + - do: + indices.refresh: { } + + - do: + indices.create: + index: destination-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + reindex: + wait_for_completion: true + body: + source: + index: test-index + dest: + index: destination-index + - do: + get: + index: destination-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Fails for non-existent inference": + - do: + indices.create: + index: incorrect-test-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: non-existing-inference-id + 
non_inference_field: + type: text + + - do: + catch: missing + index: + index: incorrect-test-index + id: doc_1 + body: + sparse_field: "inference test" + non_inference_field: "non inference test" + + - match: { error.reason: "Inference id [non-existing-inference-id] not found for field [sparse_field]" } + + # Succeeds when semantic_text field is not used + - do: + index: + index: incorrect-test-index + id: doc_1 + body: + non_inference_field: "non inference test" + +--- +"Updates with script are not allowed": + - do: + bulk: + index: test-index + body: + - '{"index": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"script": "ctx._source.new_field = \"hello\"", "scripted_upsert": true}' + + - match: { errors: true } + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } + +--- +"semantic_text copy_to calculate inference for source fields": + - do: + indices.create: + index: test-copy-to-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + source_field: + type: text + copy_to: sparse_field + another_source_field: + type: text + copy_to: sparse_field + + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + source_field: "copy_to inference test" + sparse_field: "inference test" + another_source_field: "another copy_to inference test" + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - length: { _source.sparse_field.inference.chunks: 3 } + - match: { _source.sparse_field.inference.chunks.0.text: "another copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.1.text: "copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.1.embeddings + - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.2.embeddings + + +--- +"semantic_text copy_to needs values for every source field for updates": + - do: + indices.create: + index: test-copy-to-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + source_field: + type: text + copy_to: sparse_field + another_source_field: + type: text + copy_to: sparse_field + + # Not every source field needed on creation + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + source_field: "a single source field provided" + sparse_field: "inference test" + + # Every source field needed on bulk updates + - do: + bulk: + body: + - '{"update": {"_index": "test-copy-to-index", "_id": "doc_1"}}' + - '{"doc": {"source_field": "a single source field is kept as provided via bulk", "sparse_field": "updated inference test" }}' + + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Field [another_source_field] must be specified on an update request to calculate inference for field [sparse_field]" } + + +--- +"Update works for now - but will be unsupported later to avoid dealing with missing semantic_text fields content or copy_to fields": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: 
+ update: + index: test-index + id: doc_1 + body: + doc: { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + +--- +"Calculates text expansion results for bulk updates - index": + - do: + bulk: + body: + - '{"index": {"_index": "test-index", "_id": "doc_1"}}' + - '{"sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test"}' + + - match: { errors: false } + - match: { items.0.index.result: "created" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Calculates text expansion results for bulk updates - update": + - do: + bulk: + body: + - '{"index": {"_index": "test-index", "_id": "doc_1"}}' + - '{"sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test"}' + + - match: { errors: false } + - match: { items.0.index.result: "created" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + + # Script update not supported + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"script": {"source": {"ctx.sparse_field": "updated inference test"}}}' + + - match: { errors: true } + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } + +--- +"Calculates text expansion results for bulk updates - upsert": + # Initial update fails + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": 
"another inference test", "non_inference_field": "non inference test" }}' + + - match: { errors: true } + - match: { items.0.update.status: 404 } + + # Update as upsert + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "created" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + + +--- +"Update by query picks up new semantic_text fields": + + - do: + indices.create: + index: mapping-update-index + body: + mappings: + dynamic: false + properties: + non_inference_field: + type: text + + - do: + index: + index: mapping-update-index + id: doc_1 + refresh: true + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + indices.put_mapping: + index: mapping-update-index + body: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + update_by_query: + wait_for_completion: true + index: mapping-update-index + + - match: { updated: 1 } + + - do: + get: + index: mapping-update-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Update by query works for scripts": + + - do: + index: + index: test-index + id: doc_1 + refresh: true + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + update_by_query: + 
wait_for_completion: true + index: test-index + body: { "script": "ctx._source.sparse_field = 'updated inference test'; ctx._source.dense_field = 'another updated inference test'" } + + - match: { updated: 1 } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 03f1aaf8577cf..127ea31fa7798 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -705,6 +705,11 @@ protected AggregateMetricSyntheticFieldLoader(String name, String simpleName, EnumSet<Metric> metrics) { this.metrics = metrics; } + @Override + public String fieldName() { + return name; + } + @Override public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() { return Stream.of(); diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index ebf060f520c5a..0dc37ab9e7251 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -372,6 +372,11 @@ public void write(XContentBuilder b) throws IOException { b.field(simpleName(), fieldType().value); } } + + @Override + public String fieldName() { + return name(); + } }; } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 26f5ea053771c..f42dcc6179d04 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.dra.DraResolvePlugin -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -115,12 +114,6 @@ artifacts { archives tasks.named("jar") } -if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.semantic_text_feature_flag_enabled', 'true' - } -} - tasks.register("extractNativeLicenses", Copy) { dependsOn configurations.nativeBundle into "${buildDir}/extractedNativeLicenses" diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java index 4d90d2a186858..058b64894f8b0 100644 ---
a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java @@ -59,10 +59,10 @@ public void testIngestWithMultipleModelTypes() throws IOException { assertThat(simulatedDocs, hasSize(2)); assertEquals(inferenceServiceModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(0))); var sparseEmbedding = (Map<String, Object>) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(0)); - assertEquals(Double.valueOf(1.0), sparseEmbedding.get("1")); + assertNotNull(sparseEmbedding.get("feature_1")); assertEquals(inferenceServiceModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(1))); sparseEmbedding = (Map<String, Object>) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(1)); - assertEquals(Double.valueOf(1.0), sparseEmbedding.get("1")); + assertNotNull(sparseEmbedding.get("feature_1")); } { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index 57aba2bb80d68..f09d867087664 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; @@ -71,7 +70,7 @@ public void addMlState() { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java index 2e16436736e89..2f8165e6a20be 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java @@ -15,8 +15,8 @@ import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -60,7 +60,7 @@ public void addMlState() { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), -
MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 6cb467af525c9..bc8e4794d7daa 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.OperationRouting; @@ -200,7 +199,7 @@ protected void updateModelSnapshotOnJob(ModelSnapshot modelSnapshot) { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.get(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index ae128b507c795..675933808c603 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -1101,7 +1100,7 @@ private void indexQuantiles(Quantiles quantiles) { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index a2b00974d4038..33fd7c108863b 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -144,7 +144,6 @@ public void testLoseDedicatedMasterNode() throws Exception { }); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104081") public void testFullClusterRestart() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); ensureStableCluster(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java 
b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java index 4c8382047e796..ee96d154ab55e 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ToXContent; @@ -57,7 +57,7 @@ public void createComponents() { client(), clusterService().state(), TestIndexNameExpressionResolver.newInstance(client().threadPool().getThreadContext()), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index c849e69c780bd..a2d8fd1d60316 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -146,7 +146,7 @@ public void clusterChanged(ClusterChangedEvent event) { AnnotationIndex.createAnnotationsIndexIfNecessary( client, event.state(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ActionListener.wrap(r -> isIndexCreationInProgress.set(false), e -> { if (e.getMessage().equals(previousException)) { logger.debug("Error creating ML annotations index or aliases", e); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java index 18086748d6fe0..bd80e362f2f71 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java @@ -177,7 +177,8 @@ FrequentItemSet toFrequentItemSet(List<Field> fields) throws IOException { int pos = items.nextSetBit(0); while (pos > 0) { Tuple<Integer, Object> item = transactionStore.getItem(topItemIds.getItemIdAt(pos - 1)); - assert item.v1() < fields.size() : "item id exceed number of given items, did you configure eclat correctly?"; + assert item.v1() < fields.size() + : "eclat error: item id (" + item.v1() + ") exceeds the number of given items (" + fields.size() + ")"; final Field field = fields.get(item.v1()); Object formattedValue = field.formatValue(item.v2()); String fieldName = fields.get(item.v1()).getName(); @@ -252,19 +253,20 @@ public FrequentItemSetCollector(TransactionStore transactionStore, TopItemIds topItemIds, int size, long min) { this.topItemIds = topItemIds; this.size = size; this.min = min; - queue = new FrequentItemSetPriorityQueue(size); - frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); + this.queue = new FrequentItemSetPriorityQueue(size); + this.frequentItemsByCount =
Maps.newMapWithExpectedSize(size / 10); } public FrequentItemSet[] finalizeAndGetResults(List<Field> fields) throws IOException { - FrequentItemSet[] topFrequentItems = new FrequentItemSet[size()]; + FrequentItemSet[] topFrequentItems = new FrequentItemSet[queue.size()]; for (int i = topFrequentItems.length - 1; i >= 0; i--) { topFrequentItems[i] = queue.pop().toFrequentItemSet(fields); } return topFrequentItems; } - public int size() { + // Visible for testing + int size() { return queue.size(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 72bfb6f1f0394..0f9555c77341f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -86,17 +86,15 @@ protected ItemSetMapReduceAggregator( boolean rewriteBasedOnOrdinals = false; - if (ctx.isPresent()) { - for (var c : configsAndValueFilters) { - ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() .getAggregator(registryKey, c.v1()) .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx.get()); - if (e.getField().getName() != null) { - fields.add(e.getField()); - valueSources.add(e); - } - rewriteBasedOnOrdinals |= e.usesOrdinals(); + for (var c : configsAndValueFilters) { + ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() + .getAggregator(registryKey, c.v1()) + .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx); + if (e.getField().getName() != null) { + fields.add(e.getField()); + valueSources.add(e); } + rewriteBasedOnOrdinals |= e.usesOrdinals(); } this.rewriteBasedOnOrdinals = rewriteBasedOnOrdinals; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java index c9ec772eb3321..08adecd3fbce5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java @@ -37,6 +37,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; /** * Interface to extract values from Lucene in order to feed it into the MapReducer.
@@ -53,7 +54,7 @@ ItemSetMapReduceValueSource build( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional<LeafReaderContext> ctx ) throws IOException; } @@ -345,20 +346,21 @@ public KeywordValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional<LeafReaderContext> ctx ) throws IOException { super(config, id, ValueFormatter.BYTES_REF); if (AbstractItemSetMapReducer.OrdinalOptimization.GLOBAL_ORDINALS.equals(ordinalOptimization) && config.getValuesSource() instanceof Bytes.WithOrdinals - && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping()) { + && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping() + && ctx.isPresent()) { logger.debug("Use ordinals for field [{}]", config.fieldContext().field()); this.executionStrategy = new GlobalOrdinalsStrategy( getField(), (Bytes.WithOrdinals) config.getValuesSource(), includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(config.format()), - ctx + ctx.get() ); } else { this.executionStrategy = new MapStrategy( @@ -394,7 +396,7 @@ public NumericValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization unusedOrdinalOptimization, - LeafReaderContext unusedCtx + Optional<LeafReaderContext> unusedCtx ) { super(config, id, ValueFormatter.LONG); this.source = (Numeric) config.getValuesSource(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java index 9fc97ff234c58..4ee294bcf0d8c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java @@ -256,14 +256,14 @@ private void createStatsIndexIfNecessary() { client, clusterState, indexNameExpressionResolver, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ActionListener.wrap( r -> ElasticsearchMappings.addDocMappingIfMissing( MlStatsIndex.writeAlias(), MlStatsIndex::wrappedMapping, client, clusterState, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, listener, MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION ), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java index 525d3adba7457..c7074f8e7285e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java @@ -87,7 +87,7 @@ record ResultProcessor(String question, String resultsField, TextSimilarityConfig @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult, boolean chunkResult) { if (chunkResult) { - throw chunkingNotSupportedException(TaskType.NER); + throw chunkingNotSupportedException(TaskType.TEXT_SIMILARITY); } if (pyTorchResult.getInferenceResult().length < 1) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index f6fa7ca9005c5..571d9b89a32df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -101,7 +101,7 @@ public TextExpansionQueryBuilder(StreamInput in) throws IOException { this.fieldName = in.readString(); this.modelText = in.readString(); this.modelId = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.tokenPruningConfig = in.readOptionalWriteable(TokenPruningConfig::new); } else { this.tokenPruningConfig = null; @@ -144,7 +144,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeString(modelText); out.writeString(modelId); - if (out.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalWriteable(tokenPruningConfig); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java index 51139881fc2e4..1e96cb293be3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java @@ -221,7 +221,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED; + return TransportVersions.V_8_13_0; } private static float parseWeight(String token, Object weight) throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java index 2f3ccaa313b0d..f0f7dec448d99 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java @@ -61,6 +61,7 @@ import java.util.Map; import static java.util.Collections.singletonMap; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; import static org.elasticsearch.xpack.ml.DefaultMachineLearningExtension.ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS; import static org.hamcrest.Matchers.arrayContaining; @@ -334,10 +335,7 @@ private Map<String, Object> testCreateDestinationIndex(DataFrameAnalysis analysis) { clock, config, ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS, - ActionListener.wrap( - response -> fail("should not succeed"), - e -> assertThat(e.getMessage(), Matchers.matchesRegex(finalErrorMessage)) - ) + assertNoSuccessListener(e -> assertThat(e.getMessage(), Matchers.matchesRegex(finalErrorMessage))) ); return null; @@ -578,8 +576,7 @@ public void testCreateDestinationIndex_ResultsFieldsExistsInSourceIndex() { clock, config, ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS, - ActionListener.wrap( - response -> fail("should not succeed"), + assertNoSuccessListener( e -> assertThat(
e.getMessage(), equalTo("A field that matches the dest.results_field [ml] already exists; please set a different results_field") diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 7a314b82024be..8d83156b0e0ee 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -100,7 +100,6 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; -import static org.elasticsearch.action.support.master.MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_HIDDEN; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; @@ -268,7 +267,7 @@ public void testOpenJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); assertEquals(1, manager.numberOfOpenJobs()); assertTrue(manager.jobHasActiveAutodetectProcess(jobTask)); ArgumentCaptor<JobTaskState> captor = ArgumentCaptor.forClass(JobTaskState.class); @@ -296,7 +295,7 @@ public void testOpenJob_withoutVersion() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(job.getId()); AtomicReference<Exception> errorHolder = new AtomicReference<>(); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> errorHolder.set(e)); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> errorHolder.set(e)); Exception error = errorHolder.get(); assertThat(error, is(notNullValue())); assertThat(error.getMessage(), equalTo("Cannot open job [no_version] because jobs created prior to version 5.5 are not supported")); @@ -339,22 +338,22 @@ public void testOpenJob_exceedMaxNumJobs() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("bar"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("baz"); when(jobTask.getAllocationId()).thenReturn(2L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); assertEquals(3, manager.numberOfOpenJobs()); Exception[] holder = new Exception[1]; jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foobar"); when(jobTask.getAllocationId()).thenReturn(3L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> holder[0] = e); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> holder[0] = e);
Exception e = holder[0]; assertEquals("max running job capacity [3] reached", e.getMessage()); @@ -363,7 +362,7 @@ public void testOpenJob_exceedMaxNumJobs() { when(jobTask.getJobId()).thenReturn("baz"); manager.closeJob(jobTask, null); assertEquals(2, manager.numberOfOpenJobs()); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e1, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e1, b) -> {}); assertEquals(3, manager.numberOfOpenJobs()); } @@ -374,7 +373,7 @@ public void testProcessData() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); DataLoadParams params = new DataLoadParams(TimeRange.builder().build(), Optional.empty()); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -401,7 +400,7 @@ public void testProcessDataThrowsElasticsearchStatusException_onIoException() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); Exception[] holder = new Exception[1]; manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> holder[0] = e); assertNotNull(holder[0]); @@ -413,7 +412,7 @@ public void testCloseJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -443,7 +442,7 @@ public void testVacate() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.triggerVacate()).thenReturn(true); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -475,7 +474,7 @@ public void testCanCloseClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -528,7 +527,7 @@ public void testCanKillClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -562,7 +561,7 @@ public void testBucketResetMessageIsSent() { InputStream inputStream = createInputStream(""); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> {}); verify(autodetectCommunicator).writeToJob(same(inputStream), same(analysisRegistry), same(xContentType), same(params), 
any()); } @@ -573,7 +572,7 @@ public void testFlush() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); InputStream inputStream = createInputStream(""); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -617,7 +616,7 @@ public void testCloseThrows() { // create a jobtask JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -660,7 +659,7 @@ public void testJobHasActiveAutodetectProcess() { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -683,7 +682,7 @@ public void testKillKillsAutodetectProcess() throws IOException { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -728,7 +727,7 @@ public void testProcessData_GivenStateNotOpened() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); InputStream inputStream = createInputStream(""); DataCounts[] dataCounts = new DataCounts[1]; manager.processData( @@ -836,7 +835,7 @@ public void testGetOpenProcessMemoryUsage() { AutodetectProcessManager manager = createSpyManager(); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); long expectedSizeBytes = Job.PROCESS_MEMORY_OVERHEAD.getBytes() + switch (assignmentMemoryBasis) { case MODEL_MEMORY_LIMIT -> modelMemoryLimitBytes; @@ -905,7 +904,7 @@ private AutodetectProcessManager createSpyManagerAndCallProcessData(String jobId) { AutodetectProcessManager manager = createSpyManager(); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(jobId); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java index 0d8f3aad27daa..05ab989f444fe 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java @@ -133,7 +133,9 @@ public Request(StreamInput in) throws IOException { waitForResourcesCreated =
in.readBoolean(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public boolean waitForResourcesCreated() { return waitForResourcesCreated; diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 7243eae34ac6b..af4595c5bbd76 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -31,7 +31,7 @@ public static class CsvSpecParser implements SpecReader.Parser { private final StringBuilder earlySchema = new StringBuilder(); private final StringBuilder query = new StringBuilder(); private final StringBuilder data = new StringBuilder(); - private final List<String> requiredFeatures = new ArrayList<>(); + private final List<String> requiredCapabilities = new ArrayList<>(); private CsvTestCase testCase; private CsvSpecParser() {} @@ -43,8 +43,8 @@ public Object parse(String line) { if (line.startsWith(SCHEMA_PREFIX)) { assertThat("Early schema already declared " + earlySchema, earlySchema.length(), is(0)); earlySchema.append(line.substring(SCHEMA_PREFIX.length()).trim()); - } else if (line.toLowerCase(Locale.ROOT).startsWith("required_feature:")) { - requiredFeatures.add(line.substring("required_feature:".length()).trim()); + } else if (line.toLowerCase(Locale.ROOT).startsWith("required_capability:")) { + requiredCapabilities.add(line.substring("required_capability:".length()).trim()); } else { if (line.endsWith(";")) { // pick up the query @@ -52,8 +52,8 @@ public Object parse(String line) { query.append(line.substring(0, line.length() - 1).trim()); testCase.query = query.toString(); testCase.earlySchema = earlySchema.toString(); - testCase.requiredFeatures = List.copyOf(requiredFeatures); - requiredFeatures.clear(); + testCase.requiredCapabilities = List.copyOf(requiredCapabilities); + requiredCapabilities.clear(); earlySchema.setLength(0); query.setLength(0); } @@ -111,7 +111,7 @@ public static class CsvTestCase { private final List<String> expectedWarningsRegexString = new ArrayList<>(); private final List<Pattern> expectedWarningsRegex = new ArrayList<>(); public boolean ignoreOrder; - public List<String> requiredFeatures = List.of(); + public List<String> requiredCapabilities = List.of(); // The emulated-specific warnings must always trail the non-emulated ones, if these are present. Otherwise, the closing bracket // would need to be changed to a less common sequence (like `]#` maybe). diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java index 96284b2826e48..e37823f8d3c4c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java @@ -90,7 +90,12 @@ public ClusterState execute(ClusterState currentState) throws Exception { ?
original.getIndices().get(0).getName() + "-broken" : original.getIndices().get(0).getName(); DataStream broken = original.copy() - .setIndices(List.of(new Index(brokenIndexName, "broken"), original.getIndices().get(1))) + .setBackingIndices( + original.getBackingIndices() + .copy() + .setIndices(List.of(new Index(brokenIndexName, "broken"), original.getIndices().get(1))) + .build() + ) .build(); brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 7c753692628cb..286a9cb736b1b 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -7,11 +7,13 @@ package org.elasticsearch.integration; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; @@ -25,10 +27,15 @@ import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.junit.After; @@ -39,25 +46,31 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; +import java.util.function.Consumer; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static 
org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; /** - * Tests that file settings service can properly add role mappings and detect REST clashes - * with the reserved role mappings. + * Tests that file settings service can properly add role mappings. */ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { @@ -135,12 +148,21 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { } }"""; + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + Settings.Builder builder = Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + // some tests make use of cluster-state based role mappings + .put("xpack.security.authc.cluster_state_role_mappings.enabled", true); + return builder.build(); + } + @After public void cleanUp() { updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec")); } - private void writeJSONFile(String node, String json) throws Exception { + public static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { long version = versionCounter.incrementAndGet(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -151,10 +173,11 @@ private void writeJSONFile(String node, String json) throws Exception { Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); - logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); + logger.info("--> before writing JSON config to node {} with path {}", node, tempFilePath); logger.info(Strings.format(json, version)); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + logger.info("--> after writing JSON config to node {} with path {}", node, tempFilePath); } private Tuple setupClusterStateListener(String node, String expectedKey) { @@ -238,49 +261,41 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo expectThrows(ExecutionException.class, () -> clusterAdmin().updateSettings(req).get()).getMessage() ); + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); + } + + // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. 
index-based role mappings) var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(r -> r.getName()).collect(Collectors.toSet()), - allOf(notNullValue(), containsInAnyOrder("everyone_kibana", "everyone_fleet")) - ); + assertFalse(response.hasMappings()); + assertThat(response.mappings(), emptyArray()); - // Try using the REST API to update the everyone_kibana role mapping - // This should fail, we have reserved certain role mappings in operator mode - assertEquals( - "Failed to process request " - + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " - + "with errors: [[everyone_kibana] set as read-only by [file_settings]]", - expectThrows( - IllegalArgumentException.class, - () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet() - ).getMessage() - ); - assertEquals( - "Failed to process request " - + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " - + "with errors: [[everyone_fleet] set as read-only by [file_settings]]", - expectThrows( - IllegalArgumentException.class, - () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet() - ).getMessage() - ); + // role mappings (with the same names) can also be stored in the "native" store + var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); + assertTrue(putRoleMappingResponse.isCreated()); + putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); + assertTrue(putRoleMappingResponse.isCreated()); } public void testRoleMappingsApplied() throws Exception { ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - writeJSONFile(internalCluster().getMasterName(), testJSON); + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -292,32 +307,65 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); + // native role mappings are not affected by the removal of the cluster-state based ones + { + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + 
containsInAnyOrder("everyone_kibana", "everyone_fleet") + ); + } + + // and roles are resolved based on the native role mappings + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); + } + + { + var request = new DeleteRoleMappingRequest(); + request.setName("everyone_kibana"); + var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); + assertTrue(response.isFound()); + request = new DeleteRoleMappingRequest(); + request.setName("everyone_fleet"); + response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); + assertTrue(response.isFound()); + } + + // no roles are resolved now, because both native and cluster-state based stores have been cleared + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), empty()); + } } - private Tuple setupClusterStateListenerForError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); + public static Tuple setupClusterStateListenerForError( + ClusterService clusterService, + Consumer errorMetadataConsumer + ) { CountDownLatch savedClusterState = new CountDownLatch(1); AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null - && reservedState.errorMetadata() != null - && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.PARSING) { + if (reservedState != null && reservedState.errorMetadata() != null) { clusterService.removeListener(this); metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); - assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat( - reservedState.errorMetadata().errors().get(0), - containsString("failed to parse role-mapping [everyone_kibana_bad]. 
missing field [rules]") - ); + errorMetadataConsumer.accept(reservedState.errorMetadata()); } } }); @@ -325,22 +373,13 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } - private void assertRoleMappingsNotSaved(CountDownLatch savedClusterState, AtomicLong metadataVersion) throws Exception { - boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - // This should succeed, nothing was reserved - client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana_bad")).get(); - client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet_ok")).get(); - } - public void testErrorSaved() throws Exception { ensureGreen(); // save an empty file to clear any prior state, this ensures we don't get a stale file left over by another test var savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -353,76 +392,94 @@ public void testErrorSaved() throws Exception { ); // save a bad file - savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - - writeJSONFile(internalCluster().getMasterName(), testErrorJSON); - assertRoleMappingsNotSaved(savedClusterState.v1(), savedClusterState.v2()); - } - - private Tuple setupClusterStateListenerForSecurityWriteError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); - CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); - clusterService.addListener(new ClusterStateListener() { - @Override - public void clusterChanged(ClusterChangedEvent event) { - ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null - && reservedState.errorMetadata() != null - && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.VALIDATION) { - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat(reservedState.errorMetadata().errors().get(0), containsString("closed")); - } + savedClusterState = setupClusterStateListenerForError( + internalCluster().getCurrentMasterNodeInstance(ClusterService.class), + errorMetadata -> { + assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, errorMetadata.errorKind()); + assertThat(errorMetadata.errors(), allOf(notNullValue(), hasSize(1))); + assertThat( + errorMetadata.errors().get(0), + containsString("failed to parse role-mapping [everyone_kibana_bad]. 
missing field [rules]") + ); } - }); - - return new Tuple<>(savedClusterState, metadataVersion); - } - - public void testRoleMappingFailsToWriteToStore() throws Exception { - ensureGreen(); - - var savedClusterState = setupClusterStateListenerForSecurityWriteError(internalCluster().getMasterName()); - - final CloseIndexResponse closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(closeIndexResponse.isAcknowledged()); + ); - writeJSONFile(internalCluster().getMasterName(), testJSON); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + writeJSONFile(internalCluster().getMasterName(), testErrorJSON, logger, versionCounter); + awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - - final ClusterStateResponse clusterStateResponse = clusterAdmin().state( - new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get()) - ).get(); + // no roles are resolved because both role mapping stores are empty + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), empty()); + } + } - assertNull( - clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) - ); + public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { + ensureGreen(); - ReservedStateMetadata reservedState = clusterStateResponse.getState() - .metadata() - .reservedStateMetadata() - .get(FileSettingsService.NAMESPACE); + // expect the role mappings to apply even if the .security index is closed + var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); - assertTrue(handlerMetadata == null || handlerMetadata.keys().isEmpty()); + try { + var closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(closeIndexResponse.isAcknowledged()); + + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + // no native role mappings exist + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); + + // cluster state settings are also applied + var clusterStateResponse = clusterAdmin().state(new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get())) + .get(); + assertThat( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()), + equalTo("50mb") + ); + + ReservedStateMetadata reservedState = clusterStateResponse.getState() + .metadata() + .reservedStateMetadata() + .get(FileSettingsService.NAMESPACE); 
+ + ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); + assertThat(handlerMetadata.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); + + // and roles are resolved based on the cluster-state role mappings + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); + } + } finally { + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var openIndexResponse = indicesAdmin().open(new OpenIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(openIndexResponse.isAcknowledged()); + } } private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { - "enabled": false, - "roles": [ "kibana_user" ], + "enabled": true, + "roles": [ "kibana_user_native" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" @@ -433,8 +490,7 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var bis = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); - return PutRoleMappingRequest.fromMapping(mapping); + return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java deleted file mode 100644 index 48e97b7afb897..0000000000000 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.analysis.common.CommonAnalysisPlugin; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; -import org.elasticsearch.cluster.metadata.ReservedStateMetadata; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Strings; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.reservedstate.service.FileSettingsService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.transport.netty4.Netty4Plugin; -import org.elasticsearch.xpack.wildcard.Wildcard; - -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.Arrays; -import java.util.Collection; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.notNullValue; - -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) -public class FileSettingsRoleMappingsStartupIT extends SecurityIntegTestCase { - - private static AtomicLong versionCounter = new AtomicLong(1); - private static String testJSONForFailedCase = """ - { - "metadata": { - "version": "%s", - "compatibility": "8.4.0" - }, - "state": { - "role_mappings": { - "everyone_kibana_2": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_foo": "something" - } - } - } - } - }"""; - - @Override - protected void doAssertXPackIsInstalled() {} - - @Override - protected Path nodeConfigPath(int nodeOrdinal) { - return null; - } - - private void writeJSONFile(String node, String json) throws Exception { - long version = versionCounter.incrementAndGet(); - - FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); - - Files.deleteIfExists(fileSettingsService.watchedFile()); - - Files.createDirectories(fileSettingsService.watchedFileDir()); - Path tempFilePath = createTempFile(); - - logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); - logger.info(Strings.format(json, version)); - Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); - Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); - } - - private Tuple setupClusterStateListenerForError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); - CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); - clusterService.addListener(new ClusterStateListener() { - @Override - public void clusterChanged(ClusterChangedEvent event) { - ReservedStateMetadata reservedState = 
event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null && reservedState.errorMetadata() != null) { - assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat(reservedState.errorMetadata().errors().get(0), containsString("Fake exception")); - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - } else if (reservedState != null) { - logger.debug(() -> "Got reserved state update without error metadata: " + reservedState); - } else { - logger.debug(() -> "Got cluster state update: " + event.source()); - } - } - }); - - return new Tuple<>(savedClusterState, metadataVersion); - } - - @TestLogging( - value = "org.elasticsearch.common.file:DEBUG,org.elasticsearch.xpack.security:DEBUG,org.elasticsearch.cluster.metadata:DEBUG", - reason = "https://github.com/elastic/elasticsearch/issues/98391" - ) - public void testFailsOnStartMasterNodeWithError() throws Exception { - internalCluster().setBootstrapMasterNodeIndex(0); - - internalCluster().startMasterOnlyNode(); - - logger.info("--> write some role mappings, no other file settings"); - writeJSONFile(internalCluster().getMasterName(), testJSONForFailedCase); - var savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - } - - public Collection> nodePlugins() { - return Arrays.asList( - UnstableLocalStateSecurity.class, - Netty4Plugin.class, - ReindexPlugin.class, - CommonAnalysisPlugin.class, - InternalSettingsPlugin.class, - MapperExtrasPlugin.class, - Wildcard.class - ); - } - -} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 58d6657b99e32..076ac01f1c8f3 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -2673,7 +2673,9 @@ public void testUpdateApiKeysAutoUpdatesLegacySuperuserRoleDescriptor() throws E // raw document has the legacy superuser role descriptor expectRoleDescriptorsForApiKey("limited_by_role_descriptors", legacySuperuserRoleDescriptor, getApiKeyDocument(apiKeyId)); - final Set currentSuperuserRoleDescriptors = Set.of(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + final Set currentSuperuserRoleDescriptors = ApiKeyService.removeUserRoleDescriptorDescriptions( + Set.of(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR) + ); // The first request is not a noop because we are auto-updating the legacy role descriptors to 8.x role descriptors assertSingleUpdate( apiKeyId, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index ef08f855a46cc..0ff4f1160af56 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1103,8 +1103,7 @@ Collection createComponents( new 
SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService) ); - reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore)); - systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered()); + reservedRoleMappingAction.set(new ReservedRoleMappingAction()); cacheInvalidatorRegistry.validate(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 852887767578f..73d1a1abcdb50 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -7,24 +7,18 @@ package org.elasticsearch.xpack.security.action.rolemapping; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.GroupedActionListener; -import org.elasticsearch.common.util.concurrent.ListenableFuture; -import org.elasticsearch.reservedstate.NonStateTransformResult; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -38,123 +32,59 @@ * It is used by the ReservedClusterStateService to add/update or remove role mappings. Typical usage * for this action is in the context of file based settings. 
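The essential behavioural change in the handler below is that role mappings become plain cluster-state metadata, with an early no-op when the incoming mappings equal what is already reserved, instead of asynchronous writes to the .security index. A standalone sketch of that flow using plain-Java stand-ins (State here is a toy substitute for ClusterState plus RoleMappingMetadata, not the real API):

    import java.util.Set;

    public class ReservedMappingsTransformDemo {
        // toy stand-in for a ClusterState carrying role mapping metadata
        record State(Set<String> roleMappingNames) {}

        static State transform(Set<String> newMappings, State prev) {
            if (newMappings.equals(prev.roleMappingNames())) {
                return prev; // no-op: identical mappings are already reserved in state
            }
            return new State(Set.copyOf(newMappings)); // pure state update; no index writes
        }

        public static void main(String[] args) {
            State s0 = new State(Set.of());
            State s1 = transform(Set.of("everyone_kibana", "everyone_fleet"), s0);
            System.out.println(s1.roleMappingNames().size());               // 2
            System.out.println(transform(s1.roleMappingNames(), s1) == s1); // true: unchanged input short-circuits
        }
    }

The equality short-circuit is what makes repeated file-settings reconciliation cheap: re-applying the same file produces no new cluster state.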
*/ -public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { +public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { public static final String NAME = "role_mappings"; - private final NativeRoleMappingStore roleMappingStore; - private final ListenableFuture securityIndexRecoveryListener = new ListenableFuture<>(); - - /** - * Creates a ReservedRoleMappingAction - * - * @param roleMappingStore requires {@link NativeRoleMappingStore} for storing/deleting the mappings - */ - public ReservedRoleMappingAction(NativeRoleMappingStore roleMappingStore) { - this.roleMappingStore = roleMappingStore; - } - @Override public String name() { return NAME; } - private static Collection prepare(List roleMappings) { - List requests = roleMappings.stream().map(rm -> PutRoleMappingRequest.fromMapping(rm)).toList(); - - var exceptions = new ArrayList(); - for (var request : requests) { - // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX - var exception = request.validate(false); - if (exception != null) { - exceptions.add(exception); - } - } - - if (exceptions.isEmpty() == false) { - var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); - exceptions.forEach(illegalArgumentException::addSuppressed); - throw illegalArgumentException; - } - - return requests; - } - @Override public TransformState transform(Object source, TransformState prevState) throws Exception { - // We execute the prepare() call to catch any errors in the transform phase. - // Since we store the role mappings outside the cluster state, we do the actual save with a - // non cluster state transform call. @SuppressWarnings("unchecked") - var requests = prepare((List) source); - return new TransformState( - prevState.state(), - prevState.keys(), - l -> securityIndexRecoveryListener.addListener( - ActionListener.wrap(ignored -> nonStateTransform(requests, prevState, l), l::onFailure) - ) - ); - } - - // Exposed for testing purposes - protected void nonStateTransform( - Collection requests, - TransformState prevState, - ActionListener listener - ) { - Set entities = requests.stream().map(r -> r.getName()).collect(Collectors.toSet()); - Set toDelete = new HashSet<>(prevState.keys()); - toDelete.removeAll(entities); - - final int tasksCount = requests.size() + toDelete.size(); - - // Nothing to do, don't start a group listener with 0 actions - if (tasksCount == 0) { - listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Set.of())); - return; - } - - GroupedActionListener taskListener = new GroupedActionListener<>(tasksCount, new ActionListener<>() { - @Override - public void onResponse(Collection booleans) { - listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Collections.unmodifiableSet(entities))); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - - for (var request : requests) { - roleMappingStore.putRoleMapping(request, taskListener); - } - - for (var mappingToDelete : toDelete) { - var deleteRequest = new DeleteRoleMappingRequest(); - deleteRequest.setName(mappingToDelete); - roleMappingStore.deleteRoleMapping(deleteRequest, taskListener); + Set roleMappings = validate((List) source); + RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); + if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { + return prevState; + } else { + 
ClusterState newState = newRoleMappingMetadata.updateClusterState(prevState.state()); + Set entities = newRoleMappingMetadata.getRoleMappings() + .stream() + .map(ExpressionRoleMapping::getName) + .collect(Collectors.toSet()); + return new TransformState(newState, entities); } } @Override - public List fromXContent(XContentParser parser) throws IOException { - List result = new ArrayList<>(); - + public List fromXContent(XContentParser parser) throws IOException { + List result = new ArrayList<>(); Map source = parser.map(); - for (String name : source.keySet()) { @SuppressWarnings("unchecked") Map content = (Map) source.get(name); try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, mappingParser); - result.add(mapping); + result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request()); } } - return result; } - public void securityIndexRecovered() { - securityIndexRecoveryListener.onResponse(null); + private Set validate(List roleMappings) { + var exceptions = new ArrayList(); + for (var roleMapping : roleMappings) { + // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX + var exception = roleMapping.validate(false); + if (exception != null) { + exceptions.add(exception); + } + } + if (exceptions.isEmpty() == false) { + var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); + exceptions.forEach(illegalArgumentException::addSuppressed); + throw illegalArgumentException; + } + return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 811d357b89f89..b4e8d5d6db83f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; @@ -18,12 +18,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import java.util.Optional; -import java.util.Set; - -public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledTransportAction< - DeleteRoleMappingRequest, - DeleteRoleMappingResponse> { +public class TransportDeleteRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -31,25 +26,20 @@ public class TransportDeleteRoleMappingAction extends 
ReservedStateAwareHandledT public TransportDeleteRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, NativeRoleMappingStore roleMappingStore ) { - super(DeleteRoleMappingAction.NAME, clusterService, transportService, actionFilters, DeleteRoleMappingRequest::new); + super( + DeleteRoleMappingAction.NAME, + transportService, + actionFilters, + DeleteRoleMappingRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecuteProtected(Task task, DeleteRoleMappingRequest request, ActionListener listener) { + protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } - - @Override - public Optional reservedStateHandlerName() { - return Optional.of(ReservedRoleMappingAction.NAME); - } - - @Override - public Set modifiedKeys(DeleteRoleMappingRequest request) { - return Set.of(request.getName()); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 5e32e4f903f81..44c72bc13a54b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; @@ -18,10 +18,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import java.util.Optional; -import java.util.Set; - -public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTransportAction { +public class TransportPutRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -29,32 +26,17 @@ public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTran public TransportPutRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, NativeRoleMappingStore roleMappingStore ) { - super(PutRoleMappingAction.NAME, clusterService, transportService, actionFilters, PutRoleMappingRequest::new); + super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecuteProtected( - Task task, - final PutRoleMappingRequest request, - final ActionListener listener - ) { + protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) { 
roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } - - @Override - public Optional<String> reservedStateHandlerName() { - return Optional.of(ReservedRoleMappingAction.NAME); - } - - @Override - public Set<String> modifiedKeys(PutRoleMappingRequest request) { - return Set.of(request.getName()); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 55a89e184f84f..883d7cb8ab103 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -371,7 +371,13 @@ && hasRemoteIndices(request.getRoleDescriptors())) { } } - private Set<RoleDescriptor> removeUserRoleDescriptorDescriptions(Set<RoleDescriptor> userRoleDescriptors) { + /** + * This method removes the description from the given user's (limited-by) role descriptors. + * The description field is not supported for API key role descriptors, so storing limited-by roles with descriptions + * would be inconsistent and would require backwards-compatibility handling. + * This is why we have to remove descriptions before API key roles are created or updated. + */ + static Set<RoleDescriptor> removeUserRoleDescriptorDescriptions(Set<RoleDescriptor> userRoleDescriptors) { return userRoleDescriptors.stream().map(roleDescriptor -> { if (roleDescriptor.hasDescription()) { return new RoleDescriptor( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index e7e24037543fa..55562c8ee0138 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -8,6 +8,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -17,6 +19,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -57,12 +60,18 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - final String name = request.param("name"); - PutRoleMappingRequestBuilder requestBuilder = new PutRoleMappingRequestBuilder(client).source( - name, - request.requiredContent(), - request.getXContentType() - ).setRefreshPolicy(request.param("refresh")); + String name = request.param("name"); + String refresh = request.param("refresh"); + PutRoleMappingRequestBuilder requestBuilder; + try ( + XContentParser parser =
XContentHelper.createParserNotCompressed( + LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, + request.requiredContent(), + request.getXContentType() + ) + ) { + requestBuilder = new PutRoleMappingRequestBuilder(client).source(name, parser).setRefreshPolicy(refresh); + } return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(PutRoleMappingResponse response, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java deleted file mode 100644 index b4a07093e49c3..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; - -/** - * Mock Security Provider implementation for the {@link ReservedClusterStateHandlerProvider} service interface. This is used - * for {@link org.elasticsearch.test.ESIntegTestCase} because the Security Plugin is really LocalStateSecurity in those tests. - *
* <p>
- * Unlike {@link LocalReservedSecurityStateHandlerProvider} this implementation is mocked to implement the - * {@link UnstableLocalStateSecurity}. Separate implementation is needed, because the SPI creation code matches the constructor - * signature when instantiating. E.g. we need to match {@link UnstableLocalStateSecurity} instead of {@link LocalStateSecurity} - */ -public class LocalReservedUnstableSecurityStateHandlerProvider extends LocalReservedSecurityStateHandlerProvider { - public LocalReservedUnstableSecurityStateHandlerProvider() { - throw new IllegalStateException("Provider must be constructed using PluginsService"); - } - - public LocalReservedUnstableSecurityStateHandlerProvider(UnstableLocalStateSecurity plugin) { - super(plugin); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java deleted file mode 100644 index 5621bdced15b3..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reservedstate.NonStateTransformResult; -import org.elasticsearch.reservedstate.ReservedClusterStateHandler; -import org.elasticsearch.reservedstate.TransformState; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.Optional; - -/** - * A test class that allows us to Inject new type of Reserved Handler that can - * simulate errors in saving role mappings. - *
* <p>
- * We can't use our regular path to simply make an extension of LocalStateSecurity - * in an integration test class, because the reserved handlers are injected through - * SPI. (see {@link LocalReservedUnstableSecurityStateHandlerProvider}) - */ -public final class UnstableLocalStateSecurity extends LocalStateSecurity { - - public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { - super(settings, configPath); - // We reuse most of the initialization of LocalStateSecurity, we then just overwrite - // the security plugin with an extra method to give us a fake RoleMappingAction. - Optional security = plugins.stream().filter(p -> p instanceof Security).findFirst(); - if (security.isPresent()) { - plugins.remove(security.get()); - } - - UnstableLocalStateSecurity thisVar = this; - var action = new ReservedUnstableRoleMappingAction(); - - plugins.add(new Security(settings, super.securityExtensions()) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - - @Override - protected XPackLicenseState getLicenseState() { - return thisVar.getLicenseState(); - } - - @Override - List> reservedClusterStateHandlers() { - // pretend the security index is initialized after 2 seconds - var timer = new java.util.Timer(); - timer.schedule(new java.util.TimerTask() { - @Override - public void run() { - action.securityIndexRecovered(); - timer.cancel(); - } - }, 2_000); - return List.of(action); - } - }); - } - - public static class ReservedUnstableRoleMappingAction extends ReservedRoleMappingAction { - /** - * Creates a fake ReservedRoleMappingAction that doesn't actually use the role mapping store - */ - public ReservedUnstableRoleMappingAction() { - // we don't actually need a NativeRoleMappingStore - super(null); - } - - /** - * The nonStateTransform method is the only one that uses the native store, we simply pretend - * something has called the onFailure method of the listener. 
- */ - @Override - protected void nonStateTransform( - Collection requests, - TransformState prevState, - ActionListener listener - ) { - listener.onFailure(new IllegalStateException("Fake exception")); - } - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java index 6cdca0cb3b24d..cac7c91f73ed1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java @@ -7,77 +7,40 @@ package org.elasticsearch.xpack.security.action.reservedstate; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.TransformState; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Collections; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; +import static org.hamcrest.Matchers.nullValue; /** * Tests that the ReservedRoleMappingAction does validation, can add and remove role mappings */ public class ReservedRoleMappingActionTests extends ESTestCase { + private TransformState processJSON(ReservedRoleMappingAction action, TransformState prevState, String json) throws Exception { try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { var content = action.fromXContent(parser); var state = action.transform(content, prevState); - - CountDownLatch latch = new CountDownLatch(1); - AtomicReference> updatedKeys = new AtomicReference<>(); - AtomicReference error = new AtomicReference<>(); - state.nonStateTransform().accept(new ActionListener<>() { - @Override - public void onResponse(NonStateTransformResult nonStateTransformResult) { - updatedKeys.set(nonStateTransformResult.updatedKeys()); - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - error.set(e); - latch.countDown(); - } - }); - - latch.await(); - if (error.get() != null) { - throw error.get(); - } - return new TransformState(state.state(), updatedKeys.get()); + assertThat(state.nonStateTransform(), nullValue()); + return state; } } public void testValidation() { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - 
ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - + ReservedRoleMappingAction action = new ReservedRoleMappingAction(); String badPolicyJSON = """ { "everyone_kibana": { @@ -97,7 +60,6 @@ public void testValidation() { } } }"""; - assertEquals( "failed to parse role-mapping [everyone_fleet]. missing field [rules]", expectThrows(ParsingException.class, () -> processJSON(action, prevState, badPolicyJSON)).getMessage() @@ -105,13 +67,9 @@ public void testValidation() { } public void testAddRemoveRoleMapping() throws Exception { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - + ReservedRoleMappingAction action = new ReservedRoleMappingAction(); String emptyJSON = ""; TransformState updatedState = processJSON(action, prevState, emptyJSON); @@ -147,102 +105,4 @@ public void testAddRemoveRoleMapping() throws Exception { updatedState = processJSON(action, prevState, emptyJSON); assertThat(updatedState.keys(), empty()); } - - @SuppressWarnings("unchecked") - public void testNonStateTransformWaitsOnAsyncActions() throws Exception { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - - doAnswer(invocation -> { - new Thread(() -> { - // Simulate put role mapping async action taking a while - try { - Thread.sleep(1_000); - ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - doAnswer(invocation -> { - new Thread(() -> { - // Simulate delete role mapping async action taking a while - try { - Thread.sleep(1_000); - ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - - return null; - }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); - - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); - TransformState updatedState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - - String json = """ - { - "everyone_kibana": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_reserved": true - } - }, - "everyone_fleet": { - "enabled": true, - "roles": [ "fleet_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "a9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_reserved": true - } - } - }"""; - - assertEquals( - "err_done", - expectThrows(IllegalStateException.class, () -> processJSON(action, new TransformState(state, Collections.emptySet()), json)) - .getMessage() - ); - - // Now that we've tested that we wait on putRoleMapping correctly, let it finish without exception, so we can test error on delete - 
doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - updatedState = processJSON(action, updatedState, json); - assertThat(updatedState.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); - - final TransformState currentState = new TransformState(updatedState.state(), updatedState.keys()); - - assertEquals("err_done", expectThrows(IllegalStateException.class, () -> processJSON(action, currentState, "")).getMessage()); - } - - @SuppressWarnings("unchecked") - private NativeRoleMappingStore mockNativeRoleMappingStore() { - final NativeRoleMappingStore nativeRoleMappingStore = spy( - new NativeRoleMappingStore(Settings.EMPTY, mock(Client.class), mock(SecurityIndexManager.class), mock(ScriptService.class)) - ); - - doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); - - return nativeRoleMappingStore; - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java deleted file mode 100644 index 038e673e07862..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security.action.rolemapping; - -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; - -import java.util.Collections; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.mockito.Mockito.mock; - -public class TransportDeleteRoleMappingActionTests extends ESTestCase { - public void testReservedStateHandler() { - var store = mock(NativeRoleMappingStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - var action = new TransportDeleteRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); - - assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); - - var deleteRequest = new DeleteRoleMappingRequest(); - deleteRequest.setName("kibana_all"); - assertThat(action.modifiedKeys(deleteRequest), containsInAnyOrder("kibana_all")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 58a8e8e3d4751..6f789a10a3a6c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,16 +9,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -33,7 +29,6 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; @@ -60,7 +55,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, 
mock(ClusterService.class), store); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); @@ -99,39 +94,7 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecuteProtected(mock(Task.class), request, future); + action.doExecute(mock(Task.class), request, future); return future.get(); } - - public void testReservedStateHandler() throws Exception { - assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); - String json = """ - { - "everyone_kibana": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" - } - }, - "everyone_fleet": { - "enabled": true, - "roles": [ "fleet_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7" - } - } - }"""; - - try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { - ReservedRoleMappingAction roleMappingAction = new ReservedRoleMappingAction(store); - var parsedResult = roleMappingAction.fromXContent(parser); - - for (var mapping : parsedResult) { - assertThat(action.modifiedKeys(PutRoleMappingRequest.fromMapping(mapping)), containsInAnyOrder(mapping.getName())); - } - } - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 7752b85c6345c..0871e2568d225 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -1158,7 +1158,9 @@ private static Tuple, Map> newApiKeyDocument getFastStoredHashAlgoForTests().hash(new SecureString(key.toCharArray())), "test", authentication, - type == ApiKey.Type.CROSS_CLUSTER ? Set.of() : Collections.singleton(SUPERUSER_ROLE_DESCRIPTOR), + type == ApiKey.Type.CROSS_CLUSTER + ? 
Set.of() + : ApiKeyService.removeUserRoleDescriptorDescriptions(Set.of(SUPERUSER_ROLE_DESCRIPTOR)), Instant.now(), Instant.now().plus(expiry), keyRoles, @@ -1316,22 +1318,6 @@ public void testParseRoleDescriptorsMap() throws Exception { assertThat(roleDescriptors, hasSize(1)); assertThat(roleDescriptors.get(0), equalTo(roleARoleDescriptor)); - Map superUserRdMap; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - superUserRdMap = XContentHelper.convertToMap( - XContentType.JSON.xContent(), - BytesReference.bytes(SUPERUSER_ROLE_DESCRIPTOR.toXContent(builder, ToXContent.EMPTY_PARAMS, true)).streamInput(), - false - ); - } - roleDescriptors = service.parseRoleDescriptors( - apiKeyId, - Map.of(SUPERUSER_ROLE_DESCRIPTOR.getName(), superUserRdMap), - randomApiKeyRoleType() - ); - assertThat(roleDescriptors, hasSize(1)); - assertThat(roleDescriptors.get(0), equalTo(SUPERUSER_ROLE_DESCRIPTOR)); - final Map legacySuperUserRdMap; try (XContentBuilder builder = JsonXContent.contentBuilder()) { legacySuperUserRdMap = XContentHelper.convertToMap( @@ -1812,7 +1798,10 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru RoleReference.ApiKeyRoleType.LIMITED_BY ); assertEquals(1, limitedByRoleDescriptors.size()); - assertEquals(SUPERUSER_ROLE_DESCRIPTOR, limitedByRoleDescriptors.get(0)); + RoleDescriptor superuserWithoutDescription = ApiKeyService.removeUserRoleDescriptorDescriptions(Set.of(SUPERUSER_ROLE_DESCRIPTOR)) + .iterator() + .next(); + assertEquals(superuserWithoutDescription, limitedByRoleDescriptors.get(0)); if (metadata == null) { assertNull(cachedApiKeyDoc.metadataFlattened); } else { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java index e06f6f212c687..8295f028588cc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -195,7 +194,6 @@ private void testLogging( ); final MockLogAppender mockAppender = new MockLogAppender(); try (var ignored = mockAppender.capturing(timerLogger.getName())) { - Loggers.addAppender(timerLogger, mockAppender); mockAppender.addExpectation(expectation); checker.accept(List.of()); mockAppender.assertAllExpectationsMatched(); diff --git a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider index 77c38d302d9c9..3d17572429bac 100644 --- a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider +++ b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider @@ -6,4 +6,3 @@ # 
org.elasticsearch.xpack.security.LocalReservedSecurityStateHandlerProvider -org.elasticsearch.xpack.security.LocalReservedUnstableSecurityStateHandlerProvider diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 4f7b16380d0f8..14417c693f280 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -33,11 +33,13 @@ public static class Request extends AcknowledgedRequest { private final String nodeId; public Request(String nodeId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = nodeId; } public Request(StreamInput in) throws IOException { - if (in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { // effectively super(in): @@ -50,7 +52,7 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || out.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { super.writeTo(out); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java index b82e6a08fb269..7266f8ff71129 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java @@ -43,6 +43,7 @@ public static class Request extends MasterNodeRequest { private final String[] nodeIds; public Request(String... 
nodeIds) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.nodeIds = nodeIds; } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java index bff2b0b1793b1..d857ee4b322d3 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java @@ -90,6 +90,7 @@ public Request( @Nullable String targetNodeName, @Nullable TimeValue gracePeriod ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = nodeId; this.type = type; this.reason = reason; @@ -100,7 +101,8 @@ public Request( @UpdateForV9 // TODO call super(in) instead of explicitly reading superclass contents once bwc no longer needed public Request(StreamInput in) throws IOException { - if (in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { // effectively super(in): @@ -126,7 +128,7 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || out.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { super.writeTo(out); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml index bd40e29d0b675..671fb24715631 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -98,129 +98,3 @@ setup: - match: jobs: [] - ---- -"Test get all jobs": - - - skip: - awaits_fix: "Job ordering isn't guaranteed right now, cannot test" - - - do: - headers: - Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser - rollup.put_job: - id: foo - body: > - { - "index_pattern": "foo", - "rollup_index": "foo_rollup", - "cron": "*/30 * * * * ?", - "page_size" :10, - "groups" : { - "date_histogram": { - "field": "the_field", - "calendar_interval": "1h" - } - }, - "metrics": [ - { - "field": "value_field", - "metrics": ["min", "max", "sum"] - } - ] - } - - is_true: acknowledged - - - do: - headers: - Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser - rollup.put_job: - id: bar - body: > - { - "index_pattern": "bar", - "rollup_index": "foo_rollup", - "cron": "*/30 * * * * ?", - "page_size" :10, - "groups" : { - "date_histogram": { - "field": "the_field", - "calendar_interval": "1h" - } - }, - "metrics": [ - { - "field": "value_field", - "metrics": ["min", "max", "sum"] - } - ] - } - - is_true: acknowledged - - - do: - rollup.get_jobs: - id: "_all" - - - length: { jobs: 2 } - - match: - jobs: - - config: - id: "foo" - index_pattern: "foo" - rollup_index: "foo_rollup" - cron: "*/30 * * * * ?" - page_size: 10 - groups : - date_histogram: - calendar_interval: "1h" - field: "the_field" - time_zone: "UTC" - metrics: - - field: "value_field" - metrics: - - "min" - - "max" - - "sum" - timeout: "20s" - stats: - pages_processed: 0 - documents_processed: 0 - rollups_indexed: 0 - trigger_count: 0 - status: - job_state: "stopped" - - config: - id: "bar" - index_pattern: "bar" - rollup_index: "foo_rollup" - cron: "*/30 * * * * ?" - page_size: 10 - groups : - date_histogram: - calendar_interval: "1h" - field: "the_field" - time_zone: "UTC" - metrics: - - field: "value_field" - metrics: - - "min" - - "max" - - "sum" - timeout: "20s" - stats: - pages_processed: 0 - documents_processed: 0 - rollups_indexed: 0 - trigger_count: 0 - search_failures: 0 - index_failures: 0 - index_time_in_ms: 0 - index_total: 0 - search_time_in_ms: 0 - search_total: 0 - processing_time_in_ms: 0 - processing_total: 0 - status: - job_state: "stopped" - - diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 4fdb2d05c5326..30323a1d7d363 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -107,6 +107,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // Kibana reporting template /////////////////////////////////// public static final String KIBANA_REPORTING_INDEX_TEMPLATE_NAME = ".kibana-reporting"; + public static final String KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME = "kibana-reporting@settings"; public StackTemplateRegistry( Settings nodeSettings, @@ -229,6 +230,13 @@ protected List getLifecyclePolicies() { REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE, ADDITIONAL_TEMPLATE_VARIABLES + ), + new IndexTemplateConfig( + KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + "/kibana-reporting@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + ADDITIONAL_TEMPLATE_VARIABLES ) )) { try { diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 782fe3b41ae3b..abb2d5765b128 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -429,6 +429,7 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { versions.put(StackTemplateRegistry.METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, 
StackTemplateRegistry.REGISTRY_VERSION); + versions.put(StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(versions, nodes); client.setVerifier((action, request, listener) -> { if (action instanceof PutComponentTemplateAction) { @@ -484,6 +485,10 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) ); + versions.put( + StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) + ); ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(versions, nodes); registry.clusterChanged(higherVersionEvent); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java index 481fe40a764a6..177f00c704c3c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.transform.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -34,6 +36,7 @@ public class TransportGetCheckpointNodeAction extends HandledTransportAction { + private static final Logger logger = LogManager.getLogger(TransportGetCheckpointNodeAction.class); private final IndicesService indicesService; @Inject @@ -83,17 +86,27 @@ protected static void getGlobalCheckpoints( return; } } - final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); - final IndexShard indexShard = indexService.getShard(shardId.id()); - checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> { - long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()]; - Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO); - return seqNumbers; - }); - checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint(); - ++numProcessedShards; + try { + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard indexShard = indexService.getShard(shardId.id()); + + checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> { + long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()]; + Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO); + return seqNumbers; + }); + checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint(); + ++numProcessedShards; + } catch (Exception e) { + logger.atDebug() + .withThrowable(e) + .log("Failed to get checkpoint for shard [{}] and index [{}]", shardId.getId(), shardId.getIndexName()); + listener.onFailure(e); + return; + } } + listener.onResponse(new Response(checkpointsByIndexOfThisNode)); } } diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index ed0f721f5f7f0..df8c3f62034e5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -193,7 +193,11 @@ protected void handleBulkResponse(BulkResponse bulkResponse, ActionListener listener) { }, listener::onFailure); var deducedDestIndexMappings = new SetOnce>(); - var shouldMaybeCreateDestIndexForUnattended = context.getCheckpoint() == 0 - && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + + // if the unattended transform had not created the destination index yet, or if the destination index was deleted for any + // type of transform during the last run, then we try to create the destination index. + // This is important to create the destination index explicitly before indexing documents. Otherwise, the destination + // index aliases may be missing. + var shouldMaybeCreateDestIndex = isFirstUnattendedRun() || context.shouldRecreateDestinationIndex(); ActionListener> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> { if (destIndexMappings.isEmpty() == false) { @@ -359,11 +363,12 @@ protected void onStart(long now, ActionListener listener) { // ... otherwise we fall back to index mappings deduced based on source indices this.fieldMappings = deducedDestIndexMappings.get(); } - // Since the unattended transform could not have created the destination index yet, we do it here. - // This is important to create the destination index explicitly before indexing first documents. Otherwise, the destination - // index aliases may be missing. - if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndexForUnattended) { - doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener); + + if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndex) { + doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener.delegateFailure((delegate, response) -> { + context.setShouldRecreateDestinationIndex(false); + delegate.onResponse(response); + })); } else { configurationReadyListener.onResponse(null); } @@ -380,7 +385,7 @@ protected void onStart(long now, ActionListener listener) { deducedDestIndexMappings.set(validationResponse.getDestIndexMappings()); if (isContinuous()) { transformsConfigManager.getTransformConfiguration(getJobId(), ActionListener.wrap(config -> { - if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndexForUnattended == false) { + if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndex == false) { logger.trace("[{}] transform config has not changed.", getJobId()); configurationReadyListener.onResponse(null); } else { @@ -415,7 +420,7 @@ protected void onStart(long now, ActionListener listener) { }, listener::onFailure); Instant instantOfTrigger = Instant.ofEpochMilli(now); - // If we are not on the initial batch checkpoint and its the first pass of whatever continuous checkpoint we are on, + // If we are not on the initial batch checkpoint and it's the first pass of whatever continuous checkpoint we are on, // we should verify if there are local changes based on the sync config. If not, do not proceed further and exit. 
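Taken together, the conditions above and the branch just below form a small decision over the checkpoint counter, the unattended setting, and whether the destination index needs recreating. A hedged sketch of that decision in plain Java (the names are hypothetical stand-ins for state held by TransformContext and TransformEffectiveSettings):

final class OnStartDecisionSketch {

    enum Next { CHECK_SOURCE_CHANGES, VALIDATE_AND_MAYBE_CREATE_DEST, PROCEED }

    // Mirrors the onStart branching described above: continuous checkpoints
    // first check the source for changes; a first unattended run or a deleted
    // destination index triggers validation plus destination index creation.
    static Next decide(long checkpoint, boolean initialRun, boolean unattended, boolean destIndexWasDeleted) {
        boolean firstUnattendedRun = checkpoint == 0 && unattended;
        if (checkpoint > 0 && initialRun) {
            return Next.CHECK_SOURCE_CHANGES;
        } else if (firstUnattendedRun || destIndexWasDeleted) {
            return Next.VALIDATE_AND_MAYBE_CREATE_DEST;
        } else {
            return Next.PROCEED;
        }
    }
}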
if (context.getCheckpoint() > 0 && initialRun()) { checkpointProvider.sourceHasChanged(getLastCheckpoint(), ActionListener.wrap(hasChanged -> { @@ -436,8 +441,7 @@ protected void onStart(long now, ActionListener listener) { hasSourceChanged = true; listener.onFailure(failure); })); - } else if (context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings())) { - // this transform runs in unattended mode and has never run, to go on + } else if (shouldMaybeCreateDestIndex) { validate(changedSourceListener); } else { hasSourceChanged = true; @@ -447,6 +451,13 @@ protected void onStart(long now, ActionListener listener) { } } + /** + * Returns true if this transform runs in unattended mode and has never run. + */ + private boolean isFirstUnattendedRun() { + return context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + } + protected void initializeFunction() { // create the function function = FunctionFactory.create(getConfig()); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java index 8618b01a0440b..8bf859a020ba4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.tasks.TaskCancelledException; @@ -63,7 +64,7 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti } if (unwrappedThrowable instanceof ElasticsearchException elasticsearchException) { - if (isExceptionIrrecoverable(elasticsearchException)) { + if (isExceptionIrrecoverable(elasticsearchException) && isNotIndexNotFoundException(elasticsearchException)) { return elasticsearchException; } } @@ -72,6 +73,15 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti return null; } + /** + * We can safely recover from IndexNotFoundExceptions on Bulk responses. + * If the transform is running, the next checkpoint will recreate the index. + * If the transform is not running, the next start request will recreate the index. 
+ */ + private static boolean isNotIndexNotFoundException(ElasticsearchException elasticsearchException) { + return elasticsearchException instanceof IndexNotFoundException == false; + } + public static boolean isExceptionIrrecoverable(ElasticsearchException elasticsearchException) { if (IRRECOVERABLE_REST_STATUSES.contains(elasticsearchException.status())) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java index 25c7f9efa7992..950e593165f01 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.seqno.SeqNoStats; @@ -47,6 +48,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -68,35 +71,9 @@ public void setUp() throws Exception { null, (TaskManager) null ); - IndexShard indexShardA0 = mock(IndexShard.class); - when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000)); - IndexShard indexShardA1 = mock(IndexShard.class); - when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001)); - IndexShard indexShardB0 = mock(IndexShard.class); - when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000)); - IndexShard indexShardB1 = mock(IndexShard.class); - when(indexShardB1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_001)); - Settings commonIndexSettings = Settings.builder() - .put(SETTING_VERSION_CREATED, 1_000_000) - .put(SETTING_NUMBER_OF_SHARDS, 2) - .put(SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - IndexService indexServiceA = mock(IndexService.class); - when(indexServiceA.getIndexSettings()).thenReturn( - new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY) - ); - when(indexServiceA.getShard(0)).thenReturn(indexShardA0); - when(indexServiceA.getShard(1)).thenReturn(indexShardA1); - IndexService indexServiceB = mock(IndexService.class); - when(indexServiceB.getIndexSettings()).thenReturn( - new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY) - ); - when(indexServiceB.getShard(0)).thenReturn(indexShardB0); - when(indexServiceB.getShard(1)).thenReturn(indexShardB1); + indicesService = mock(IndicesService.class); when(indicesService.clusterService()).thenReturn(clusterService); - when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA); - when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); task = new CancellableTask(123, "type", "action", "description", new TaskId("dummy-node:456"), Map.of()); clock = new FakeClock(Instant.now()); @@ -117,6 
+94,7 @@ public void testGetGlobalCheckpointsWithHighTimeout() throws InterruptedExceptio } private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws InterruptedException { + mockIndexServiceResponse(); CountDownLatch latch = new CountDownLatch(1); SetOnce responseHolder = new SetOnce<>(); SetOnce exceptionHolder = new SetOnce<>(); @@ -136,7 +114,38 @@ private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws Interrupt assertThat(exceptionHolder.get(), is(nullValue())); } + private void mockIndexServiceResponse() { + IndexShard indexShardA0 = mock(IndexShard.class); + when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000)); + IndexShard indexShardA1 = mock(IndexShard.class); + when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001)); + IndexShard indexShardB0 = mock(IndexShard.class); + when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000)); + IndexShard indexShardB1 = mock(IndexShard.class); + when(indexShardB1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_001)); + Settings commonIndexSettings = Settings.builder() + .put(SETTING_VERSION_CREATED, 1_000_000) + .put(SETTING_NUMBER_OF_SHARDS, 2) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + IndexService indexServiceA = mock(IndexService.class); + when(indexServiceA.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceA.getShard(0)).thenReturn(indexShardA0); + when(indexServiceA.getShard(1)).thenReturn(indexShardA1); + IndexService indexServiceB = mock(IndexService.class); + when(indexServiceB.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceB.getShard(0)).thenReturn(indexShardB0); + when(indexServiceB.getShard(1)).thenReturn(indexShardB1); + when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA); + when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); + } + public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws InterruptedException { + mockIndexServiceResponse(); TaskCancelHelper.cancel(task, "due to apocalypse"); CountDownLatch latch = new CountDownLatch(1); @@ -156,6 +165,7 @@ public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws Interrupt } public void testGetGlobalCheckpointsFailureDueToTimeout() throws InterruptedException { + mockIndexServiceResponse(); // Move the current time past the timeout. 
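These timeout tests drive time through an injectable clock instead of waiting on wall time, which keeps the deadline assertions deterministic. A minimal fake clock in that spirit, as a hedged sketch (the test's actual FakeClock helper may differ):

import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;

// Time only moves when the test advances it explicitly.
final class MutableClockSketch extends Clock {
    private Instant instant;

    MutableClockSketch(Instant start) {
        this.instant = start;
    }

    void advanceTimeBy(Duration duration) {
        instant = instant.plus(duration);
    }

    @Override
    public ZoneId getZone() {
        return ZoneOffset.UTC;
    }

    @Override
    public Clock withZone(ZoneId zone) {
        // Simplification: these tests never re-zone the clock.
        return Clock.fixed(instant, zone);
    }

    @Override
    public Instant instant() {
        return instant;
    }
}

Advancing such a clock past the deadline, as the next line does, makes the action observe an already-expired timeout without any real waiting.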
clock.advanceTimeBy(Duration.ofSeconds(10)); @@ -184,4 +194,24 @@ public void testGetGlobalCheckpointsFailureDueToTimeout() throws InterruptedExce is(equalTo("Transform checkpointing timed out on node [dummy-node] after [5s] having processed [0] of [4] shards")) ); } + + public void testIndexNotFoundException() throws InterruptedException { + var expectedException = new IndexNotFoundException("some index"); + when(indicesService.indexServiceSafe(any())).thenThrow(expectedException); + + var exceptionHolder = new SetOnce(); + TransportGetCheckpointNodeAction.getGlobalCheckpoints( + indicesService, + task, + shards, + TimeValue.timeValueSeconds(5), + clock, + ActionListener.wrap(r -> { + fail("Test is meant to call the onFailure method."); + }, exceptionHolder::set) + ); + + assertNotNull("Listener's onFailure handler was not called.", exceptionHolder.get()); + assertThat(exceptionHolder.get(), sameInstance(expectedException)); + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index fe54847af0404..eeef51bcbcb06 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -10,15 +10,17 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -27,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.script.ScriptException; @@ -35,7 +38,6 @@ import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -75,6 +77,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import static 
java.util.Collections.singletonList; import static org.elasticsearch.xpack.core.transform.transforms.DestConfigTests.randomDestConfig; @@ -85,6 +88,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.matchesRegex; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; @@ -99,8 +103,11 @@ */ public class TransformIndexerFailureHandlingTests extends ESTestCase { - private Client client; private ThreadPool threadPool; + private static final Function EMPTY_BULK_RESPONSE = bulkRequest -> new BulkResponse( + new BulkItemResponse[0], + 100 + ); static class MockedTransformIndexer extends ClientTransformIndexer { @@ -110,13 +117,13 @@ static class MockedTransformIndexer extends ClientTransformIndexer { // used for synchronizing with the test private CountDownLatch latch; + private int doProcessCount; MockedTransformIndexer( ThreadPool threadPool, ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, TransformExtension transformExtension, - String executorName, IndexBasedTransformConfigManager transformsConfigManager, CheckpointProvider checkpointProvider, TransformConfig transformConfig, @@ -127,7 +134,8 @@ static class MockedTransformIndexer extends ClientTransformIndexer { TransformContext context, Function searchFunction, Function bulkFunction, - Function deleteByQueryFunction + Function deleteByQueryFunction, + int doProcessCount ) { super( threadPool, @@ -157,6 +165,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { this.searchFunction = searchFunction; this.bulkFunction = bulkFunction; this.deleteByQueryFunction = deleteByQueryFunction; + this.doProcessCount = doProcessCount; } public void initialize() { @@ -182,12 +191,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener throw new IllegalStateException(e); } - try { - SearchResponse response = searchFunction.apply(buildSearchRequest().v2()); - nextPhase.onResponse(response); - } catch (Exception e) { - nextPhase.onFailure(e); - } + ActionListener.run(nextPhase, l -> ActionListener.respondAndRelease(l, searchFunction.apply(buildSearchRequest().v2()))); } @Override @@ -278,12 +282,22 @@ void doGetFieldMappings(ActionListener> fieldMappingsListene protected void persistState(TransformState state, ActionListener listener) { listener.onResponse(null); } + + @Override + protected IterationResult doProcess(SearchResponse searchResponse) { + if (doProcessCount > 0) { + doProcessCount -= 1; + // pretend that we processed 10k documents for each call + getStats().incrementNumDocuments(10_000); + return new IterationResult<>(Stream.of(new IndexRequest()), new TransformIndexerPosition(null, null), false); + } + return super.doProcess(searchResponse); + } } @Before public void setUpMocks() { threadPool = createThreadPool(); - client = new NoOpClient(threadPool); } @After @@ -325,17 +339,7 @@ public void testPageSizeAdapt() throws Exception { TransformAuditor auditor = MockTransformAuditor.createMockAuditor(); TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, 
state, searchFunction, bulkFunction, null, threadPool, auditor, context); final CountDownLatch latch = indexer.newLatch(1); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); @@ -415,7 +419,6 @@ public void testDoProcessAggNullCheck() { bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -476,17 +479,7 @@ public void testScriptError() throws Exception { TransformContext.Listener contextListener = createContextListener(failIndexerCalled, failureMessage); TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); final CountDownLatch latch = indexer.newLatch(1); @@ -542,7 +535,10 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti ); try { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - Function searchFunction = searchRequest -> searchResponse; + Function searchFunction = searchRequest -> { + searchResponse.mustIncRef(); + return searchResponse; + }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -571,7 +567,6 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti bulkFunction, deleteByQueryFunction, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -635,7 +630,10 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce ); try { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - Function searchFunction = searchRequest -> searchResponse; + Function searchFunction = searchRequest -> { + searchResponse.mustIncRef(); + return searchResponse; + }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -670,7 +668,6 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce bulkFunction, deleteByQueryFunction, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -744,6 +741,7 @@ public SearchResponse apply(SearchRequest searchRequest) { new ShardSearchFailure[] { new ShardSearchFailure(new Exception()) } ); } + searchResponse.mustIncRef(); return searchResponse; } }; @@ -764,7 +762,6 @@ public SearchResponse apply(SearchRequest searchRequest) { bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -865,17 +862,7 @@ public void testHandleFailureAuditing() { ) ); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); indexer.handleFailure( new SearchPhaseExecutionException( @@ -936,6 +923,151 @@ public void testHandleFailureAuditing() { auditor.assertAllExpectationsMatched(); } + /** + * Given no bulk upload errors + * When we run the indexer + * Then we should not fail or recreate the destination index + */ + public void testHandleBulkResponseWithNoFailures() throws Exception { + var indexer = runIndexer(createMockIndexer(returnHit(), EMPTY_BULK_RESPONSE)); + assertThat(indexer.getStats().getIndexFailures(), is(0L)); + 
assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertNull(indexer.context.getLastFailure()); + } + + private static TransformIndexer runIndexer(MockedTransformIndexer indexer) throws Exception { + var latch = indexer.newLatch(1); + indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + latch.countDown(); + assertBusy(() -> assertThat(indexer.getState(), equalTo(IndexerState.STARTED)), 10, TimeUnit.SECONDS); + return indexer; + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction + ) { + return createMockIndexer(searchFunction, bulkFunction, mock(TransformContext.Listener.class)); + } + + private static Function returnHit() { + return request -> new SearchResponse( + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, + "", + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + } + + /** + * Given an irrecoverable bulk upload error + * When we run the indexer + * Then we should fail without retries and not recreate the destination index + */ + public void testHandleBulkResponseWithIrrecoverableFailures() throws Exception { + var failCalled = new AtomicBoolean(); + var indexer = runIndexer( + createMockIndexer( + returnHit(), + bulkResponseWithError(new ResourceNotFoundException("resource not found error")), + createContextListener(failCalled, new AtomicReference<>()) + ) + ); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertTrue(failCalled.get()); + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction, + TransformContext.Listener listener + ) { + return createMockIndexer( + new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + null, + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + new SettingsConfig.Builder().setMaxPageSearchSize(randomBoolean() ? 
null : randomIntBetween(500, 10_000)).build(), + null, + null, + null, + null + ), + new AtomicReference<>(IndexerState.STOPPED), + searchFunction, + bulkFunction, + null, + threadPool, + mock(TransformAuditor.class), + new TransformContext(TransformTaskState.STARTED, "", 0, listener), + 1 + ); + } + + private static Function bulkResponseWithError(Exception e) { + return bulkRequest -> new BulkResponse( + new BulkItemResponse[] { + BulkItemResponse.failure(1, DocWriteRequest.OpType.INDEX, new BulkItemResponse.Failure("the_index", "id", e)) }, + 100 + ); + } + + /** + * Given an IndexNotFound bulk upload error + * When we run the indexer + * Then we should fail with retries and recreate the destination index + */ + public void testHandleBulkResponseWithIndexNotFound() throws Exception { + var indexer = runIndexerWithBulkResponseError(new IndexNotFoundException("Some Error")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertTrue(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + + private TransformIndexer runIndexerWithBulkResponseError(Exception e) throws Exception { + return runIndexer(createMockIndexer(returnHit(), bulkResponseWithError(e))); + } + + private static BulkIndexingException bulkIndexingException(TransformIndexer indexer) { + var lastFailure = indexer.context.getLastFailure(); + assertNotNull(lastFailure); + assertThat(lastFailure, instanceOf(BulkIndexingException.class)); + return (BulkIndexingException) lastFailure; + } + + /** + * Given a recoverable bulk upload error + * When we run the indexer + * Then we should fail with retries and not recreate the destination index + */ + public void testHandleBulkResponseWithNoIrrecoverableFailures() throws Exception { + var indexer = runIndexerWithBulkResponseError(new EsRejectedExecutionException("es rejected execution")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + public void testHandleFailure() { testHandleFailure(0, 5, 0, 0); testHandleFailure(5, 0, 5, 2); @@ -996,17 +1128,7 @@ private void testHandleFailure( ) ); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); for (int i = 0; i < expectedEffectiveNumFailureRetries; ++i) { indexer.handleFailure(new Exception("exception no. 
" + (i + 1))); @@ -1039,14 +1161,26 @@ private MockedTransformIndexer createMockIndexer( Function bulkFunction, Function deleteByQueryFunction, ThreadPool threadPool, - String executorName, TransformAuditor auditor, TransformContext context + ) { + return createMockIndexer(config, state, searchFunction, bulkFunction, deleteByQueryFunction, threadPool, auditor, context, 0); + } + + private MockedTransformIndexer createMockIndexer( + TransformConfig config, + AtomicReference state, + Function searchFunction, + Function bulkFunction, + Function deleteByQueryFunction, + ThreadPool threadPool, + TransformAuditor auditor, + TransformContext context, + int doProcessCount ) { IndexBasedTransformConfigManager transformConfigManager = mock(IndexBasedTransformConfigManager.class); doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = invocationOnMock.getArgument(1); listener.onResponse(config); return null; }).when(transformConfigManager).getTransformConfiguration(any(), any()); @@ -1055,7 +1189,6 @@ private MockedTransformIndexer createMockIndexer( mock(ClusterService.class), mock(IndexNameExpressionResolver.class), mock(TransformExtension.class), - executorName, transformConfigManager, mock(CheckpointProvider.class), config, @@ -1066,7 +1199,8 @@ private MockedTransformIndexer createMockIndexer( context, searchFunction, bulkFunction, - deleteByQueryFunction + deleteByQueryFunction, + doProcessCount ); indexer.initialize(); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index a474976cf9dfa..01a2db839b7d8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -88,7 +88,7 @@ public class TransformIndexerStateTests extends ESTestCase { private static final SearchResponse ONE_HIT_SEARCH_RESPONSE = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java index b71156cad5adf..9a0431d40a972 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.shard.ShardId; @@ -27,116 +28,27 @@ import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentLocation; +import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.Collectors; public class ExceptionRootCauseFinderTests extends ESTestCase { public void testGetFirstIrrecoverableExceptionFromBulkResponses() { - Map bulkItemResponses = new HashMap<>(); - - int id = 1; - // 1 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error") - ) - ) - ); - // 2 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new ResourceNotFoundException("resource not found error")) - ) - ); - // 3 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new IllegalArgumentException("illegal argument error")) - ) - ); - // 4 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new EsRejectedExecutionException("es rejected execution")) - ) - ); - // 5 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new TranslogException(new ShardId("the_index", "uid", 0), "translog error")) - ) - ); - // 6 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED) - ) - ) - ); - // 7 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN) - ) - ) - ); - // 8 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS) - ) - ) - ); - // 9 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR) - ) - ) + Map bulkItemResponses = bulkItemResponses( + new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error"), + new ResourceNotFoundException("resource not found error"), + new IllegalArgumentException("illegal argument error"), + new EsRejectedExecutionException("es rejected execution"), + new TranslogException(new ShardId("the_index", "uid", 0), "translog error"), + new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED), + new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN), + new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS), + new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR), + new IndexNotFoundException("some missing index") ); 
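The expectations that follow encode a two-level rule: a failure is surfaced as irrecoverable when its REST status can never succeed on retry, except that IndexNotFoundException is now skipped on the bulk-response path because the destination index can be recreated. A hedged sketch of that rule in plain Java (Status and the set below are illustrative stand-ins for RestStatus and IRRECOVERABLE_REST_STATUSES, not the real definitions):

import java.util.Set;

final class BulkFailureRuleSketch {

    // Illustrative stand-ins; the real statuses live in RestStatus and the
    // real set in ExceptionRootCauseFinder.IRRECOVERABLE_REST_STATUSES.
    enum Status { BAD_REQUEST, UNAUTHORIZED, FORBIDDEN, TOO_MANY_REQUESTS, INTERNAL_SERVER_ERROR }

    private static final Set<Status> IRRECOVERABLE = Set.of(Status.BAD_REQUEST, Status.UNAUTHORIZED, Status.FORBIDDEN);

    // Mirrors getFirstIrrecoverableExceptionFromBulkResponses for one failure:
    // IndexNotFoundException stays retryable on the bulk path, since the next
    // checkpoint (or the next start request) recreates the index.
    static boolean surfaceAsIrrecoverable(Status status, boolean isIndexNotFound) {
        return IRRECOVERABLE.contains(status) && isIndexNotFound == false;
    }
}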
@@ -157,6 +69,14 @@ public void testGetFirstIrrecoverableExceptionFromBulkResponses() {
         assertNull(ExceptionRootCauseFinder.getFirstIrrecoverableExceptionFromBulkResponses(bulkItemResponses.values()));
     }
 
+    private static Map<Integer, BulkItemResponse> bulkItemResponses(Exception... exceptions) {
+        var id = new AtomicInteger(1);
+        return Arrays.stream(exceptions)
+            .map(exception -> new BulkItemResponse.Failure("the_index", "id", exception))
+            .map(failure -> BulkItemResponse.failure(id.get(), OpType.INDEX, failure))
+            .collect(Collectors.toMap(response -> id.getAndIncrement(), Function.identity()));
+    }
+
     public void testIsIrrecoverable() {
         assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new MapperException("mappings problem")));
         assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new TaskCancelledException("cancelled task")));
@@ -174,6 +94,7 @@ public void testIsIrrecoverable() {
         assertTrue(
             ExceptionRootCauseFinder.isExceptionIrrecoverable(new DocumentParsingException(new XContentLocation(1, 2), "parse error"))
         );
+        assertTrue(ExceptionRootCauseFinder.isExceptionIrrecoverable(new IndexNotFoundException("some missing index")));
     }
 
     private static void assertFirstException(Collection<BulkItemResponse> bulkItemResponses, Class<?> expectedClass, String message) {
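
The new assertion above pins down that IndexNotFoundException is now classified as irrecoverable, so a transform whose target index has been deleted fails fast instead of retrying forever. Below is a rough sketch of that classification idea; it paraphrases only the cases this test exercises, the names are invented, and the authoritative rules live in ExceptionRootCauseFinder.isExceptionIrrecoverable.

    import org.elasticsearch.ElasticsearchSecurityException;
    import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
    import org.elasticsearch.index.IndexNotFoundException;
    import org.elasticsearch.index.mapper.DocumentParsingException;
    import org.elasticsearch.rest.RestStatus;

    class IrrecoverableSketch {

        // Partial, illustrative rules only; not the real implementation.
        static boolean looksIrrecoverable(Exception e) {
            if (e instanceof EsRejectedExecutionException) {
                return false; // back-pressure from a busy node: retrying can succeed
            }
            if (e instanceof IndexNotFoundException || e instanceof DocumentParsingException) {
                return true; // structural problems: retrying the same request cannot help
            }
            if (e instanceof ElasticsearchSecurityException ese) {
                // 401/403 are permanent; 429 and 5xx read as transient load conditions
                return ese.status() == RestStatus.UNAUTHORIZED || ese.status() == RestStatus.FORBIDDEN;
            }
            return false; // default to retryable, matching the tests' "not irrecoverable" cases
        }
    }
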
diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java
index a07544ff68c9a..69709d638a771 100644
--- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java
+++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java
@@ -1061,5 +1061,10 @@ public void write(XContentBuilder b) throws IOException {
             }
             storedValues = emptyList();
         }
+
+        @Override
+        public String fieldName() {
+            return name();
+        }
     }
 }
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java
index 3d9e7f3828bc7..17363d58545c2 100644
--- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java
+++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java
@@ -20,11 +20,14 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.ssl.SslVerificationMode;
 import org.elasticsearch.common.util.Maps;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.SecurityIntegTestCase;
 import org.elasticsearch.test.fixtures.smb.SmbTestContainer;
 import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter;
+import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse;
@@ -187,11 +190,16 @@ public void setupRoleMappings() throws Exception {
         Map<String, ActionFuture<PutRoleMappingResponse>> futures = Maps.newLinkedHashMapWithExpectedSize(content.size());
         for (int i = 0; i < content.size(); i++) {
             final String name = "external_" + i;
-            final PutRoleMappingRequestBuilder builder = new PutRoleMappingRequestBuilder(client()).source(
-                name,
-                new BytesArray(content.get(i)),
-                XContentType.JSON
-            );
+            final PutRoleMappingRequestBuilder builder;
+            try (
+                XContentParser parser = XContentHelper.createParserNotCompressed(
+                    LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG,
+                    new BytesArray(content.get(i)),
+                    XContentType.JSON
+                )
+            ) {
+                builder = new PutRoleMappingRequestBuilder(client()).source(name, parser);
+            }
             futures.put(name, builder.execute());
         }
         for (String mappingName : futures.keySet()) {
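
In the final hunk, PutRoleMappingRequestBuilder.source(...) now takes an XContentParser rather than raw bytes, which makes the caller responsible for the parser's lifecycle, hence the try-with-resources. A condensed sketch of that wiring follows; the JSON body is made up, and printing the parsed map stands in for the builder call, which needs a live client.

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
    import org.elasticsearch.common.xcontent.XContentHelper;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentType;

    import java.io.IOException;

    class RoleMappingParserSketch {

        public static void main(String[] args) throws IOException {
            // Made-up role-mapping JSON; the real bodies come from the test's resource files.
            String json = "{ \"roles\": [\"superuser\"], \"enabled\": true, \"rules\": { \"field\": { \"username\": \"*\" } } }";
            try (
                // createParserNotCompressed skips the decompression probe used for stored source;
                // the LoggingDeprecationHandler config logs, rather than rejects, deprecated fields.
                XContentParser parser = XContentHelper.createParserNotCompressed(
                    LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG,
                    new BytesArray(json),
                    XContentType.JSON
                )
            ) {
                // The test hands the open parser to PutRoleMappingRequestBuilder.source(name, parser);
                // materializing it to a Map here just shows the parser is ready to consume.
                System.out.println(parser.map());
            }
        }
    }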