diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
index d3209ff27ce06..67878181a005d 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
@@ -122,6 +122,36 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
.findAll { it != null }
}
+ // force IntelliJ to generate *.iml files for each imported module
+ tasks.register("enableExternalConfiguration") {
+ group = 'ide'
+ description = 'Enable per-module *.iml files'
+
+ doLast {
+ modifyXml('.idea/misc.xml') { xml ->
+ def externalStorageConfig = xml.component.find { it.'@name' == 'ExternalStorageConfigurationManager' }
+ if (externalStorageConfig) {
+ xml.remove(externalStorageConfig)
+ }
+ }
+ }
+ }
+
+ // modifies the idea module config to enable preview features on 'elasticsearch-native' module
+ tasks.register("enablePreviewFeatures") {
+ group = 'ide'
+ description = 'Enables preview features on native library module'
+ dependsOn tasks.named("enableExternalConfiguration")
+
+ doLast {
+ ['main', 'test'].each { sourceSet ->
+ modifyXml(".idea/modules/libs/native/elasticsearch.libs.elasticsearch-native.${sourceSet}.iml") { xml ->
+ xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = 'JDK_21_PREVIEW'
+ }
+ }
+ }
+ }
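+
+  // Both tasks rely on the `modifyXml` helper defined earlier in this script. Conceptually
+  // (a sketch under assumptions, not the actual implementation) it parses the target file,
+  // hands the root node to the closure, and writes the mutated tree back to disk:
+  //
+  //   def modifyXml(path, action) {
+  //     def xml = new XmlParser().parse(file(path))   // parse the .idea XML file
+  //     action(xml)                                   // caller mutates the DOM in place
+  //     file(path).withPrintWriter { writer -> new XmlNodePrinter(writer).print(xml) }
+  //   }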
+
tasks.register('buildDependencyArtifacts') {
group = 'ide'
description = 'Builds artifacts needed as dependency for IDE modules'
@@ -149,7 +179,10 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
testRunner = 'choose_per_test'
}
taskTriggers {
- afterSync tasks.named('configureIdeCheckstyle'), tasks.named('configureIdeaGradleJvm'), tasks.named('buildDependencyArtifacts')
+ afterSync tasks.named('configureIdeCheckstyle'),
+ tasks.named('configureIdeaGradleJvm'),
+ tasks.named('buildDependencyArtifacts'),
+ tasks.named('enablePreviewFeatures')
}
encodings {
encoding = 'UTF-8'
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
index 19309fe2da8a3..0bf4bcb33c23b 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
@@ -172,9 +172,6 @@ private static String distributionProjectName(ElasticsearchDistribution distribu
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_IRONBANK) {
return projectName + "ironbank-docker" + archString + "-export";
}
- if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_CLOUD) {
- return projectName + "cloud-docker" + archString + "-export";
- }
if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS) {
return projectName + "cloud-ess-docker" + archString + "-export";
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java
deleted file mode 100644
index eb522dbcad5e2..0000000000000
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerCloudElasticsearchDistributionType.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.gradle.internal.distribution;
-
-import org.elasticsearch.gradle.ElasticsearchDistributionType;
-
-public class DockerCloudElasticsearchDistributionType implements ElasticsearchDistributionType {
-
- DockerCloudElasticsearchDistributionType() {}
-
- @Override
- public String getName() {
- return "dockerCloud";
- }
-
- @Override
- public boolean isDocker() {
- return true;
- }
-}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java
index ba0e76b3f5b99..8f0951da86b88 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java
@@ -19,7 +19,6 @@ public class InternalElasticsearchDistributionTypes {
public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_UBI = new DockerUbiElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType();
- public static ElasticsearchDistributionType DOCKER_CLOUD = new DockerCloudElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType();
public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType();
@@ -29,7 +28,6 @@ public class InternalElasticsearchDistributionTypes {
DOCKER,
DOCKER_UBI,
DOCKER_IRONBANK,
- DOCKER_CLOUD,
DOCKER_CLOUD_ESS,
DOCKER_WOLFI
);
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
index 77ab9557eac33..8e7884888b63b 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
@@ -49,7 +49,6 @@
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.ALL_INTERNAL;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DEB;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER;
-import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK;
import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI;
@@ -149,7 +148,6 @@ private static Map> lifecycleTask
lifecyleTasks.put(DOCKER, project.getTasks().register(taskPrefix + ".docker"));
lifecyleTasks.put(DOCKER_UBI, project.getTasks().register(taskPrefix + ".docker-ubi"));
lifecyleTasks.put(DOCKER_IRONBANK, project.getTasks().register(taskPrefix + ".docker-ironbank"));
- lifecyleTasks.put(DOCKER_CLOUD, project.getTasks().register(taskPrefix + ".docker-cloud"));
lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess"));
lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi"));
lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives"));
diff --git a/build-tools-internal/src/main/resources/checkstyle_suppressions.xml b/build-tools-internal/src/main/resources/checkstyle_suppressions.xml
index fd01993951959..5fdfebf6849e7 100644
--- a/build-tools-internal/src/main/resources/checkstyle_suppressions.xml
+++ b/build-tools-internal/src/main/resources/checkstyle_suppressions.xml
@@ -35,6 +35,8 @@
+
+
diff --git a/docs/changelog/113563.yaml b/docs/changelog/113563.yaml
deleted file mode 100644
index 48484ead99d77..0000000000000
--- a/docs/changelog/113563.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 113563
-summary: Use ELSER By Default For Semantic Text
-area: Mapping
-type: enhancement
-issues: []
diff --git a/docs/reference/cluster/voting-exclusions.asciidoc b/docs/reference/cluster/voting-exclusions.asciidoc
index e5b8544a16554..55587a7010f8f 100644
--- a/docs/reference/cluster/voting-exclusions.asciidoc
+++ b/docs/reference/cluster/voting-exclusions.asciidoc
@@ -7,7 +7,6 @@
Adds or removes master-eligible nodes from the
<>.
-
[[voting-config-exclusions-api-request]]
==== {api-request-title}
@@ -28,7 +27,7 @@ users can use this API.
[[voting-config-exclusions-api-desc]]
==== {api-description-title}
-
+
By default, if there are more than three master-eligible nodes in the cluster
and you remove fewer than half of the master-eligible nodes in the cluster at
once, the <> automatically
@@ -50,14 +49,19 @@ use `DELETE /_cluster/voting_config_exclusions?wait_for_removal=false` to clear
the voting configuration exclusions without waiting for the nodes to leave the
cluster.
-If the API fails, you can safely retry it. Only a successful response
-guarantees that the node has been removed from the voting configuration and
-will not be reinstated.
+A response to `POST /_cluster/voting_config_exclusions` with an HTTP status
+code of `200 OK` guarantees that the node has been removed from the voting
+configuration and will not be reinstated until the voting configuration
+exclusions are cleared by calling `DELETE /_cluster/voting_config_exclusions`.
+If the call to `POST /_cluster/voting_config_exclusions` fails or returns a
+response with an HTTP status code other than `200 OK` then the node may not
+have been removed from the voting configuration. In that case, you may safely
+retry the call.
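+
+For example, if an earlier attempt timed out, it is safe to issue the same request
+again (shown here with hypothetical node names):
+
+[source,console]
+--------------------------------------------------
+POST /_cluster/voting_config_exclusions?node_names=nodeName1,nodeName2
+--------------------------------------------------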
NOTE: Voting exclusions are required only when you remove at least half of the
master-eligible nodes from a cluster in a short time period. They are not
-required when removing master-ineligible nodes or fewer than half of the
-master-eligible nodes.
+required when removing master-ineligible nodes or when removing fewer than half
+of the master-eligible nodes.
For more information, see <>.
@@ -94,7 +98,7 @@ list. Defaults to `true`, meaning that all excluded nodes must be removed from
the cluster before this API takes any action. If set to `false` then the voting
configuration exclusions list is cleared even if some excluded nodes are still
in the cluster. Only applies to the `DELETE` form of this API.
-
+
[[voting-config-exclusions-api-example]]
==== {api-examples-title}
@@ -102,7 +106,7 @@ Adds nodes named `nodeName1` and `nodeName2` to the voting configuration
exclusions list:
[source,console]
---------------------------------------------------
+--------------------------------------------------
POST /_cluster/voting_config_exclusions?node_names=nodeName1,nodeName2
--------------------------------------------------
diff --git a/docs/reference/connector/docs/connectors-API-tutorial.asciidoc b/docs/reference/connector/docs/connectors-API-tutorial.asciidoc
index 5275f82de1b1f..4118c564e4759 100644
--- a/docs/reference/connector/docs/connectors-API-tutorial.asciidoc
+++ b/docs/reference/connector/docs/connectors-API-tutorial.asciidoc
@@ -367,7 +367,7 @@ Refer to the individual connectors-references,connector references for these con
====
We're using a self-managed connector in this tutorial.
To use these APIs with an Elastic managed connector, there's some extra setup for API keys.
-Refer to native-connectors-manage-API-keys for details.
+Refer to <<native-connectors-manage-API-keys>> for details.
====
We're now ready to sync our PostgreSQL data to {es}.
diff --git a/docs/reference/connector/docs/connectors-servicenow.asciidoc b/docs/reference/connector/docs/connectors-servicenow.asciidoc
index 089a3b405d8a5..a02c418f11d74 100644
--- a/docs/reference/connector/docs/connectors-servicenow.asciidoc
+++ b/docs/reference/connector/docs/connectors-servicenow.asciidoc
@@ -81,7 +81,7 @@ Comma-separated list of services to fetch data from ServiceNow. If the value is
- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/incident-management/concept/c_IncidentManagement.html[Incident]
- link:https://docs.servicenow.com/bundle/tokyo-servicenow-platform/page/use/service-catalog-requests/task/t_AddNewRequestItems.html[Requested Item]
- link:https://docs.servicenow.com/bundle/tokyo-customer-service-management/page/product/customer-service-management/task/t_SearchTheKnowledgeBase.html[Knowledge]
-- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change Request]
+- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change request]
+
[NOTE]
====
@@ -89,7 +89,7 @@ If you have configured a custom service, the `*` value will not fetch data from
====
Default value is `*`. Examples:
+
- - `User, Incident, Requested Item, Knowledge, Change Request`
+ - `User, Incident, Requested Item, Knowledge, Change request`
- `*`
Enable document level security::
@@ -139,7 +139,7 @@ For default services, connectors use the following roles to find users who have
| Knowledge | `admin`, `knowledge`, `knowledge_manager`, `knowledge_admin`
-| Change Request | `admin`, `sn_change_read`, `itil`
+| Change request | `admin`, `sn_change_read`, `itil`
|===
For services other than these defaults, the connector iterates over access controls with `read` operations and finds the respective roles for those services.
@@ -305,7 +305,7 @@ Comma-separated list of services to fetch data from ServiceNow. If the value is
- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/incident-management/concept/c_IncidentManagement.html[Incident]
- link:https://docs.servicenow.com/bundle/tokyo-servicenow-platform/page/use/service-catalog-requests/task/t_AddNewRequestItems.html[Requested Item]
- link:https://docs.servicenow.com/bundle/tokyo-customer-service-management/page/product/customer-service-management/task/t_SearchTheKnowledgeBase.html[Knowledge]
-- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change Request]
+- link:https://docs.servicenow.com/bundle/tokyo-it-service-management/page/product/change-management/task/t_CreateAChange.html[Change request]
+
[NOTE]
====
@@ -313,7 +313,7 @@ If you have configured a custom service, the `*` value will not fetch data from
====
Default value is `*`. Examples:
+
- - `User, Incident, Requested Item, Knowledge, Change Request`
+ - `User, Incident, Requested Item, Knowledge, Change request`
- `*`
`retry_count`::
@@ -374,7 +374,7 @@ For default services, connectors use the following roles to find users who have
| Knowledge | `admin`, `knowledge`, `knowledge_manager`, `knowledge_admin`
-| Change Request | `admin`, `sn_change_read`, `itil`
+| Change request | `admin`, `sn_change_read`, `itil`
|===
For services other than these defaults, the connector iterates over access controls with `read` operations and finds the respective roles for those services.
diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc
index 1206cb02ba89a..38afc7c416f18 100644
--- a/docs/reference/inference/inference-apis.asciidoc
+++ b/docs/reference/inference/inference-apis.asciidoc
@@ -35,7 +35,6 @@ Elastic –, then create an {infer} endpoint by the <>.
Now use <> to perform
<> on your data.
-
[discrete]
[[default-enpoints]]
=== Default {infer} endpoints
@@ -53,6 +52,67 @@ For these models, the minimum number of allocations is `0`.
If there is no {infer} activity that uses the endpoint, the number of allocations will scale down to `0` automatically after 15 minutes.
+[discrete]
+[[infer-chunking-config]]
+=== Configuring chunking
+
+{infer-cap} endpoints have a limit on the amount of text they can process at once, determined by the model's input capacity.
+Chunking is the process of splitting the input text into pieces that remain within these limits.
+It occurs when ingesting documents into <>.
+Chunking also helps produce sections that are digestible for humans.
+Returning a long document in search results is less useful than providing the most relevant chunk of text.
+
+Each chunk will include the text subpassage and the corresponding embedding generated from it.
+
+By default, documents are split into sentences and grouped in sections of up to 250 words, with a one-sentence overlap so that each chunk shares a sentence with the previous chunk.
+Overlapping ensures continuity and prevents vital contextual information in the input text from being lost by a hard break.
+
+{es} uses the https://unicode-org.github.io/icu-docs/[ICU4J] library to detect word and sentence boundaries for chunking.
+https://unicode-org.github.io/icu/userguide/boundaryanalysis/#word-boundary[Word boundaries] are identified by following a series of rules, not just the presence of a whitespace character.
+For written languages that do not use whitespace, such as Chinese or Japanese, dictionary lookups are used to detect word boundaries.
+
+
+[discrete]
+==== Chunking strategies
+
+Two strategies are available for chunking: `sentence` and `word`.
+
+The `sentence` strategy splits the input text at sentence boundaries.
+Each chunk contains one or more complete sentences, ensuring that the integrity of sentence-level context is preserved, except when a sentence causes a chunk to exceed the `max_chunk_size` word count, in which case it is split across chunks.
+The `sentence_overlap` option defines the number of sentences from the previous chunk to include in the current chunk; it can be either `0` or `1`.
+
+The `word` strategy splits the input text on individual words up to the `max_chunk_size` limit.
+The `overlap` option is the number of words from the previous chunk to include in the current chunk.
+
+The default chunking strategy is `sentence`.
+
+NOTE: The default chunking strategy for {infer} endpoints created before 8.16 is `word`.
+
+
+[discrete]
+==== Example of configuring the chunking behavior
+
+The following example creates an {infer} endpoint with the `elasticsearch` service that deploys the ELSER model by default and configures the chunking behavior.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/sparse_embedding/small_chunk_size
+{
+ "service": "elasticsearch",
+ "service_settings": {
+ "num_allocations": 1,
+ "num_threads": 1
+ },
+ "chunking_settings": {
+ "strategy": "sentence",
+ "max_chunk_size": 100,
+ "sentence_overlap": 0
+ }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
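+The following example configures the same service with the `word` chunking strategy instead.
+The endpoint ID and option values here are illustrative choices, not recommendations; note that `overlap` may not exceed half of `max_chunk_size`.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/sparse_embedding/word_chunks
+{
+  "service": "elasticsearch",
+  "service_settings": {
+    "num_allocations": 1,
+    "num_threads": 1
+  },
+  "chunking_settings": {
+    "strategy": "word",
+    "max_chunk_size": 100,
+    "overlap": 25
+  }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]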
+
include::delete-inference.asciidoc[]
include::get-inference.asciidoc[]
include::post-inference.asciidoc[]
diff --git a/docs/reference/inference/inference-shared.asciidoc b/docs/reference/inference/inference-shared.asciidoc
index 2eafa3434e89e..da497c6581e5d 100644
--- a/docs/reference/inference/inference-shared.asciidoc
+++ b/docs/reference/inference/inference-shared.asciidoc
@@ -31,4 +31,36 @@ end::task-settings[]
tag::task-type[]
The type of the {infer} task that the model will perform.
-end::task-type[]
\ No newline at end of file
+end::task-type[]
+
+tag::chunking-settings[]
+Chunking configuration object.
+Refer to <<infer-chunking-config>> to learn more about chunking.
+end::chunking-settings[]
+
+tag::chunking-settings-max-chunking-size[]
+Specifies the maximum size of a chunk in words.
+Defaults to `250`.
+This value cannot be higher than `300` or lower than `20` (for `sentence` strategy) or `10` (for `word` strategy).
+end::chunking-settings-max-chunking-size[]
+
+tag::chunking-settings-overlap[]
+Only for `word` chunking strategy.
+Specifies the number of overlapping words for chunks.
+Defaults to `100`.
+This value cannot be higher than half of `max_chunking_size`.
+end::chunking-settings-overlap[]
+
+tag::chunking-settings-sentence-overlap[]
+Only for `sentence` chunking strategy.
+Specifies the number of overlapping sentences for chunks.
+It can be either `1` or `0`.
+Defaults to `1`.
+end::chunking-settings-sentence-overlap[]
+
+tag::chunking-settings-strategy[]
+Specifies the chunking strategy.
+It can be either `sentence` or `word`.
+end::chunking-settings-strategy[]
+
+
diff --git a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc
index 0607b56b528ea..c3ff40a39cd86 100644
--- a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc
+++ b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc
@@ -34,6 +34,26 @@ Available task types:
[[infer-service-alibabacloud-ai-search-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string) The type of service supported for the specified task type.
In this case,
@@ -108,7 +128,6 @@ To modify this, set the `requests_per_minute` setting of this object in your ser
include::inference-shared.asciidoc[tag=request-per-minute-example]
--
-
`task_settings`::
(Optional, object)
include::inference-shared.asciidoc[tag=task-settings]
diff --git a/docs/reference/inference/service-amazon-bedrock.asciidoc b/docs/reference/inference/service-amazon-bedrock.asciidoc
index dbffd5c26fbcc..761777e32f8e0 100644
--- a/docs/reference/inference/service-amazon-bedrock.asciidoc
+++ b/docs/reference/inference/service-amazon-bedrock.asciidoc
@@ -32,6 +32,26 @@ Available task types:
[[infer-service-amazon-bedrock-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string) The type of service supported for the specified task type.
In this case,
diff --git a/docs/reference/inference/service-anthropic.asciidoc b/docs/reference/inference/service-anthropic.asciidoc
index 41419db7a6069..7fb3d1d5bea34 100644
--- a/docs/reference/inference/service-anthropic.asciidoc
+++ b/docs/reference/inference/service-anthropic.asciidoc
@@ -32,6 +32,26 @@ Available task types:
[[infer-service-anthropic-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc
index 0d711a0d6171f..dd13a3e59aae5 100644
--- a/docs/reference/inference/service-azure-ai-studio.asciidoc
+++ b/docs/reference/inference/service-azure-ai-studio.asciidoc
@@ -33,6 +33,26 @@ Available task types:
[[infer-service-azure-ai-studio-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-azure-openai.asciidoc b/docs/reference/inference/service-azure-openai.asciidoc
index 6f03c5966d9e6..b134e2b687f6c 100644
--- a/docs/reference/inference/service-azure-openai.asciidoc
+++ b/docs/reference/inference/service-azure-openai.asciidoc
@@ -33,6 +33,26 @@ Available task types:
[[infer-service-azure-openai-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-cohere.asciidoc b/docs/reference/inference/service-cohere.asciidoc
index 84eae6e880617..1a815e3c45f36 100644
--- a/docs/reference/inference/service-cohere.asciidoc
+++ b/docs/reference/inference/service-cohere.asciidoc
@@ -34,6 +34,26 @@ Available task types:
[[infer-service-cohere-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc
index 259779a12134d..0103b425faefe 100644
--- a/docs/reference/inference/service-elasticsearch.asciidoc
+++ b/docs/reference/inference/service-elasticsearch.asciidoc
@@ -36,6 +36,26 @@ Available task types:
[[infer-service-elasticsearch-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc
index 521fab0375584..273d743e47a4b 100644
--- a/docs/reference/inference/service-elser.asciidoc
+++ b/docs/reference/inference/service-elser.asciidoc
@@ -36,6 +36,26 @@ Available task types:
[[infer-service-elser-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-google-ai-studio.asciidoc b/docs/reference/inference/service-google-ai-studio.asciidoc
index 25aa89cd49110..738fce3d53e9b 100644
--- a/docs/reference/inference/service-google-ai-studio.asciidoc
+++ b/docs/reference/inference/service-google-ai-studio.asciidoc
@@ -33,6 +33,26 @@ Available task types:
[[infer-service-google-ai-studio-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc
index 640553ab74626..34e14e05e072a 100644
--- a/docs/reference/inference/service-google-vertex-ai.asciidoc
+++ b/docs/reference/inference/service-google-vertex-ai.asciidoc
@@ -33,6 +33,26 @@ Available task types:
[[infer-service-google-vertex-ai-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-hugging-face.asciidoc b/docs/reference/inference/service-hugging-face.asciidoc
index 177a15177d21f..6d8667351a6b4 100644
--- a/docs/reference/inference/service-hugging-face.asciidoc
+++ b/docs/reference/inference/service-hugging-face.asciidoc
@@ -32,6 +32,26 @@ Available task types:
[[infer-service-hugging-face-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-mistral.asciidoc b/docs/reference/inference/service-mistral.asciidoc
index 077e610191705..244381d107161 100644
--- a/docs/reference/inference/service-mistral.asciidoc
+++ b/docs/reference/inference/service-mistral.asciidoc
@@ -32,6 +32,26 @@ Available task types:
[[infer-service-mistral-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc
index 075e76dc7d741..21643133553e1 100644
--- a/docs/reference/inference/service-openai.asciidoc
+++ b/docs/reference/inference/service-openai.asciidoc
@@ -33,6 +33,26 @@ Available task types:
[[infer-service-openai-api-request-body]]
==== {api-request-body-title}
+`chunking_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=chunking-settings]
+
+`max_chunking_size`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-max-chunking-size]
+
+`overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-overlap]
+
+`sentence_overlap`:::
+(Optional, integer)
+include::inference-shared.asciidoc[tag=chunking-settings-sentence-overlap]
+
+`strategy`:::
+(Optional, string)
+include::inference-shared.asciidoc[tag=chunking-settings-strategy]
+
`service`::
(Required, string)
The type of service supported for the specified task type. In this case,
diff --git a/docs/reference/search/search-your-data/search-application-overview.asciidoc b/docs/reference/search/search-your-data/search-application-overview.asciidoc
index e12b55911740b..13cc97bb8aeab 100644
--- a/docs/reference/search/search-your-data/search-application-overview.asciidoc
+++ b/docs/reference/search/search-your-data/search-application-overview.asciidoc
@@ -74,7 +74,7 @@ To create a new search application in {kib}:
. Name your search application.
. Select *Create*.
-Your search application should now be available in the list of search applications.
+Your search application should now be available in the list.
//[.screenshot]
// image::../../images/search-applications/search-applications-create.png[Create search application screen]
diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java
deleted file mode 100644
index 3547b3f9910ad..0000000000000
--- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.plugins.internal;
-
-import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.ingest.common.IngestCommonPlugin;
-import org.elasticsearch.plugins.IngestPlugin;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.xcontent.FilterXContentParserWrapper;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicLong;
-
-import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.equalTo;
-
-@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
-public class XContentMeteringParserDecoratorWithPipelinesIT extends ESIntegTestCase {
-
- private static String TEST_INDEX_NAME = "test-index-name";
- // the assertions are done in plugin which is static and will be created by ES server.
- // hence a static flag to make sure it is indeed used
- public static volatile boolean hasWrappedParser;
- public static AtomicLong providedFixedSize = new AtomicLong();
-
- public void testDocumentIsReportedWithPipelines() throws Exception {
- hasWrappedParser = false;
- // pipeline adding fields, changing destination is not affecting reporting
- putJsonPipeline("pipeline", """
- {
- "processors": [
- {
- "set": {
- "field": "my-text-field",
- "value": "xxxx"
- }
- },
- {
- "set": {
- "field": "my-boolean-field",
- "value": true
- }
- }
- ]
- }
- """);
-
- client().index(
- new IndexRequest(TEST_INDEX_NAME).setPipeline("pipeline")
- .id("1")
- .source(jsonBuilder().startObject().field("test", "I am sam i am").endObject())
- ).actionGet();
- assertBusy(() -> {
- // ingest node has used an observer that was counting #map operations
- // and passed that info to newFixedSize observer in TransportShardBulkAction
- assertTrue(hasWrappedParser);
- assertThat(providedFixedSize.get(), equalTo(1L));
- });
- }
-
- @Override
- protected Collection<Class<? extends Plugin>> nodePlugins() {
- return List.of(TestDocumentParsingProviderPlugin.class, IngestCommonPlugin.class);
- }
-
- public static class TestDocumentParsingProviderPlugin extends Plugin implements DocumentParsingProviderPlugin, IngestPlugin {
-
- public TestDocumentParsingProviderPlugin() {}
-
- @Override
- public DocumentParsingProvider getDocumentParsingProvider() {
- // returns a static instance, because we want to assert that the wrapping is called only once
- return new DocumentParsingProvider() {
- @Override
- public XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest<?> request) {
- if (request instanceof IndexRequest indexRequest && indexRequest.getNormalisedBytesParsed() > 0) {
- long normalisedBytesParsed = indexRequest.getNormalisedBytesParsed();
- providedFixedSize.set(normalisedBytesParsed);
- return new TestXContentMeteringParserDecorator(normalisedBytesParsed);
- }
- return new TestXContentMeteringParserDecorator(0L);
- }
-
- @Override
- public DocumentSizeReporter newDocumentSizeReporter(
- String indexName,
- MapperService mapperService,
- DocumentSizeAccumulator documentSizeAccumulator
- ) {
- return DocumentSizeReporter.EMPTY_INSTANCE;
- }
- };
- }
- }
-
- public static class TestXContentMeteringParserDecorator implements XContentMeteringParserDecorator {
- long mapCounter = 0;
-
- public TestXContentMeteringParserDecorator(long mapCounter) {
- this.mapCounter = mapCounter;
- }
-
- @Override
- public XContentParser decorate(XContentParser xContentParser) {
- hasWrappedParser = true;
- return new FilterXContentParserWrapper(xContentParser) {
-
- @Override
- public Map<String, Object> map() throws IOException {
- mapCounter++;
- return super.map();
- }
- };
- }
-
- @Override
- public ParsedDocument.DocumentSize meteredDocumentSize() {
- return new ParsedDocument.DocumentSize(mapCounter, 0);
- }
- }
-
-}
diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java
index 786f091e0c024..7331afdbf585a 100644
--- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java
+++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceIT.java
@@ -46,15 +46,21 @@ public class DatabaseNodeServiceIT extends AbstractGeoIpIT {
public void testNonGzippedDatabase() throws Exception {
String databaseType = "GeoLite2-Country";
String databaseFileName = databaseType + ".mmdb";
- // making the dabase name unique so we know we're not using another one:
+ // making the database name unique so we know we're not using another one:
String databaseName = randomAlphaOfLength(20) + "-" + databaseFileName;
byte[] mmdbBytes = getBytesForFile(databaseFileName);
final DatabaseNodeService databaseNodeService = internalCluster().getInstance(DatabaseNodeService.class);
assertNull(databaseNodeService.getDatabase(databaseName));
int numChunks = indexData(databaseName, mmdbBytes);
- retrieveDatabase(databaseNodeService, databaseName, mmdbBytes, numChunks);
- assertBusy(() -> assertNotNull(databaseNodeService.getDatabase(databaseName)));
- assertValidDatabase(databaseNodeService, databaseName, databaseType);
+ /*
+ * If DatabaseNodeService::checkDatabases runs it will sometimes (rarely) remove the database we are using in this test while we
+ * are trying to assert things about it. So if it does then we 'just' try again.
+ */
+ assertBusy(() -> {
+ retrieveDatabase(databaseNodeService, databaseName, mmdbBytes, numChunks);
+ assertNotNull(databaseNodeService.getDatabase(databaseName));
+ assertValidDatabase(databaseNodeService, databaseName, databaseType);
+ });
}
/*
@@ -64,16 +70,22 @@ public void testNonGzippedDatabase() throws Exception {
public void testGzippedDatabase() throws Exception {
String databaseType = "GeoLite2-Country";
String databaseFileName = databaseType + ".mmdb";
- // making the dabase name unique so we know we're not using another one:
+ // making the database name unique so we know we're not using another one:
String databaseName = randomAlphaOfLength(20) + "-" + databaseFileName;
byte[] mmdbBytes = getBytesForFile(databaseFileName);
byte[] gzipBytes = gzipFileBytes(databaseName, mmdbBytes);
final DatabaseNodeService databaseNodeService = internalCluster().getInstance(DatabaseNodeService.class);
assertNull(databaseNodeService.getDatabase(databaseName));
int numChunks = indexData(databaseName, gzipBytes);
- retrieveDatabase(databaseNodeService, databaseName, gzipBytes, numChunks);
- assertBusy(() -> assertNotNull(databaseNodeService.getDatabase(databaseName)));
- assertValidDatabase(databaseNodeService, databaseName, databaseType);
+ /*
+ * If DatabaseNodeService::checkDatabases runs it will sometimes (rarely) remove the database we are using in this test while we
+ * are trying to assert things about it. So if it does then we 'just' try again.
+ */
+ assertBusy(() -> {
+ retrieveDatabase(databaseNodeService, databaseName, gzipBytes, numChunks);
+ assertNotNull(databaseNodeService.getDatabase(databaseName));
+ assertValidDatabase(databaseNodeService, databaseName, databaseType);
+ });
}
/*
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
index d0cdc5a3e1b5e..11aa123824d18 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
@@ -14,8 +14,10 @@
import com.maxmind.db.Reader;
import org.apache.lucene.util.Constants;
+import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.core.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
@@ -113,7 +115,10 @@ public void testAsnFree() {
entry("asn", 16625L),
entry("network", "23.32.184.0/21"),
entry("domain", "akamai.com")
- )
+ ),
+ Map.ofEntries(entry("name", "organization_name"), entry("asn", "asn"), entry("network", "network"), entry("domain", "domain")),
+ Set.of("ip"),
+ Set.of()
);
}
@@ -133,7 +138,17 @@ public void testAsnStandard() {
entry("domain", "tpx.com"),
entry("type", "hosting"),
entry("country_iso_code", "US")
- )
+ ),
+ Map.ofEntries(
+ entry("name", "organization_name"),
+ entry("asn", "asn"),
+ entry("network", "network"),
+ entry("domain", "domain"),
+ entry("country", "country_iso_code"),
+ entry("type", "type")
+ ),
+ Set.of("ip"),
+ Set.of()
);
}
@@ -188,7 +203,16 @@ public void testCountryFree() {
entry("country_iso_code", "IE"),
entry("continent_name", "Europe"),
entry("continent_code", "EU")
- )
+ ),
+ Map.ofEntries(
+ entry("continent_name", "continent_name"),
+ entry("continent", "continent_code"),
+ entry("country", "country_iso_code"),
+ entry("country_name", "country_name"),
+ entry("type", "type")
+ ),
+ Set.of("ip"),
+ Set.of("network")
);
}
@@ -208,7 +232,18 @@ public void testGeolocationStandard() {
entry("timezone", "Europe/London"),
entry("postal_code", "E1W"),
entry("location", Map.of("lat", 51.50853, "lon", -0.12574))
- )
+ ),
+ Map.ofEntries(
+ entry("country", "country_iso_code"),
+ entry("region", "region_name"),
+ entry("city", "city_name"),
+ entry("timezone", "timezone"),
+ entry("postal_code", "postal_code"),
+ entry("lat", "location"),
+ entry("lng", "location")
+ ),
+ Set.of("ip", "location"),
+ Set.of("geoname_id", "region_code")
);
}
@@ -266,7 +301,16 @@ public void testPrivacyDetectionStandard() {
entry("relay", false),
entry("tor", false),
entry("vpn", true)
- )
+ ),
+ Map.ofEntries(
+ entry("hosting", "hosting"),
+ entry("proxy", "proxy"),
+ entry("relay", "relay"),
+ entry("tor", "tor"),
+ entry("vpn", "vpn")
+ ),
+ Set.of("ip"),
+ Set.of("network", "service")
);
}
@@ -286,7 +330,17 @@ public void testPrivacyDetectionStandardNonEmptyService() {
entry("relay", false),
entry("tor", false),
entry("vpn", true)
- )
+ ),
+ Map.ofEntries(
+ entry("hosting", "hosting"),
+ entry("proxy", "proxy"),
+ entry("service", "service"),
+ entry("relay", "relay"),
+ entry("tor", "tor"),
+ entry("vpn", "vpn")
+ ),
+ Set.of("ip"),
+ Set.of("network")
);
}
@@ -438,7 +492,15 @@ private static File pathToFile(Path databasePath) {
return databasePath.toFile();
}
- private void assertExpectedLookupResults(String databaseName, String ip, IpDataLookup lookup, Map<String, Object> expected) {
+ private void assertExpectedLookupResults(
+ String databaseName,
+ String ip,
+ IpDataLookup lookup,
+ Map<String, Object> expected,
+ Map<String, String> keyMappings,
+ Set<String> knownAdditionalKeys,
+ Set<String> knownMissingKeys
+ ) {
try (DatabaseReaderLazyLoader loader = loader(databaseName)) {
Map<String, Object> actual = lookup.getData(loader, ip);
assertThat(
@@ -449,6 +511,7 @@ private void assertExpectedLookupResults(String databaseName, String ip, IpDataL
for (Map.Entry<String, Object> entry : expected.entrySet()) {
assertThat("Unexpected value for key [" + entry.getKey() + "]", actual.get(entry.getKey()), equalTo(entry.getValue()));
}
+ assertActualResultsMatchReader(actual, databaseName, ip, keyMappings, knownAdditionalKeys, knownMissingKeys);
} catch (AssertionError e) {
fail(e, "Assert failed for database [%s] with address [%s]", databaseName, ip);
} catch (Exception e) {
@@ -456,6 +519,42 @@ private void assertExpectedLookupResults(String databaseName, String ip, IpDataL
}
}
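+ /*
+ * Cross-checks the flattened lookup results against the raw entries that the underlying
+ * database reader returns for the same IP: every raw key must either map (via keyMappings)
+ * to a key present in the results or appear in knownMissingKeys, and every result key must
+ * be a mapped key or appear in knownAdditionalKeys.
+ */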
+ private void assertActualResultsMatchReader(
+ Map<String, Object> actual,
+ String databaseName,
+ String ip,
+ Map<String, String> keyMappings,
+ Set<String> knownAdditionalKeys,
+ Set<String> knownMissingKeys
+ ) throws IOException {
+ Path databasePath = tmpDir.resolve(databaseName);
+ try (Reader reader = new Reader(pathToFile(databasePath))) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> data = reader.get(InetAddresses.forString(ip), Map.class);
+ for (String key : data.keySet()) {
+ if (keyMappings.containsKey(key)) {
+ assertTrue(
+ Strings.format(
+ "The reader returned key [%s] that is expected to map to key [%s], but [%s] did not appear in the "
+ + "actual data",
+ key,
+ keyMappings.get(key),
+ keyMappings.get(key)
+ ),
+ actual.containsKey(keyMappings.get(key))
+ );
+ } else if (knownMissingKeys.contains(key) == false) {
+ fail(null, "The reader returned unexpected key [%s]", key);
+ }
+ }
+ for (String key : actual.keySet()) {
+ if (keyMappings.containsValue(key) == false && knownAdditionalKeys.contains(key) == false) {
+ fail(null, "Unexpected key [%s] in results", key);
+ }
+ }
+ }
+ }
+
private DatabaseReaderLazyLoader loader(final String databaseName) {
Path path = tmpDir.resolve(databaseName);
copyDatabase("ipinfo/" + databaseName, path); // the ipinfo databases are prefixed on the test classpath
diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java
index 21f42bf9eb99c..b1c5d707220af 100644
--- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java
+++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java
@@ -19,6 +19,7 @@
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.plugins.PluginsService;
@@ -53,11 +54,13 @@
import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_TOTAL;
import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL;
import static org.elasticsearch.repositories.s3.S3RepositoriesMetrics.METRIC_DELETE_RETRIES_HISTOGRAM;
+import static org.elasticsearch.rest.RestStatus.FORBIDDEN;
import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR;
import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
import static org.elasticsearch.rest.RestStatus.REQUESTED_RANGE_NOT_SATISFIED;
import static org.elasticsearch.rest.RestStatus.SERVICE_UNAVAILABLE;
import static org.elasticsearch.rest.RestStatus.TOO_MANY_REQUESTS;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
@@ -320,6 +323,51 @@ public void testRetrySnapshotDeleteMetricsWhenRetriesExhausted() {
assertThat(longHistogramMeasurement.get(0).getLong(), equalTo(3L));
}
+ public void testPutDoesNotRetryOn403InStateful() {
+ final Settings settings = internalCluster().getInstance(Settings.class);
+ assertThat(DiscoveryNode.isStateless(settings), equalTo(false));
+
+ final String repository = createRepository(randomRepositoryName());
+ final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData);
+ final TestTelemetryPlugin plugin = getPlugin(dataNodeName);
+ // Exclude snapshot-related purposes to avoid triggering the assertions that cross-check purposes and blob names
+ final OperationPurpose purpose = randomFrom(
+ OperationPurpose.REPOSITORY_ANALYSIS,
+ OperationPurpose.CLUSTER_STATE,
+ OperationPurpose.INDICES,
+ OperationPurpose.TRANSLOG
+ );
+ final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository);
+ final String blobName = randomIdentifier();
+
+ plugin.resetMeter();
+ addErrorStatus(new S3ErrorResponse(FORBIDDEN, Strings.format("""
+ <?xml version="1.0" encoding="UTF-8"?>
+ <Error>
+ <Code>InvalidAccessKeyId</Code>
+ <Message>The AWS Access Key Id you provided does not exist in our records.</Message>
+ <RequestId>%s</RequestId>
+ </Error>
+ """, randomUUID())));
+
+ final var exception = expectThrows(IOException.class, () -> {
+ if (randomBoolean()) {
+ blobContainer.writeBlob(purpose, blobName, new BytesArray("blob"), randomBoolean());
+ } else {
+ blobContainer.writeMetadataBlob(
+ purpose,
+ blobName,
+ randomBoolean(),
+ randomBoolean(),
+ outputStream -> outputStream.write("blob".getBytes())
+ );
+ }
+ });
+ assertThat(exception.getCause().getMessage(), containsString("InvalidAccessKeyId"));
+
+ assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_TOTAL, Operation.PUT_OBJECT), equalTo(1L));
+ assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_TOTAL, Operation.PUT_OBJECT), equalTo(1L));
+ }
+
private void addErrorStatus(RestStatus... statuses) {
errorResponseQueue.addAll(Arrays.stream(statuses).map(S3ErrorResponse::new).toList());
}
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java
index 9042234de6f50..36eb1d61e21d7 100644
--- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java
+++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java
@@ -9,6 +9,7 @@
package org.elasticsearch.repositories.s3;
+import com.amazonaws.AmazonServiceException;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.SDKGlobalConfiguration;
import com.amazonaws.auth.AWSCredentials;
@@ -20,6 +21,8 @@
import com.amazonaws.auth.STSAssumeRoleWithWebIdentitySessionCredentialsProvider;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.http.IdleConnectionReaper;
+import com.amazonaws.retry.PredefinedRetryPolicies;
+import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.internal.Constants;
@@ -27,6 +30,7 @@
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClient;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
+import org.apache.http.HttpStatus;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
@@ -193,7 +197,10 @@ AmazonS3 buildClient(final S3ClientSettings clientSettings) {
protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettings) {
final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
builder.withCredentials(buildCredentials(LOGGER, clientSettings, webIdentityTokenCredentialsProvider));
- builder.withClientConfiguration(buildConfiguration(clientSettings));
+ final ClientConfiguration clientConfiguration = buildConfiguration(clientSettings, isStateless);
+ assert (isStateless == false && clientConfiguration.getRetryPolicy() == PredefinedRetryPolicies.DEFAULT)
+ || (isStateless && clientConfiguration.getRetryPolicy() == RETRYABLE_403_RETRY_POLICY) : "invalid retry policy configuration";
+ builder.withClientConfiguration(clientConfiguration);
String endpoint = Strings.hasLength(clientSettings.endpoint) ? clientSettings.endpoint : Constants.S3_HOSTNAME;
if ((endpoint.startsWith("http://") || endpoint.startsWith("https://")) == false) {
@@ -223,7 +230,7 @@ protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettin
}
// pkg private for tests
- static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) {
+ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings, boolean isStateless) {
final ClientConfiguration clientConfiguration = new ClientConfiguration();
// the response metadata cache is only there for diagnostics purposes,
// but can force objects from every response to the old generation.
@@ -248,6 +255,10 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) {
clientConfiguration.setUseThrottleRetries(clientSettings.throttleRetries);
clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis);
+ if (isStateless) {
+ clientConfiguration.setRetryPolicy(RETRYABLE_403_RETRY_POLICY);
+ }
+
return clientConfiguration;
}
@@ -504,4 +515,21 @@ interface SystemEnvironment {
interface JvmEnvironment {
String getProperty(String key, String defaultValue);
}
+
+ static final RetryPolicy RETRYABLE_403_RETRY_POLICY = RetryPolicy.builder()
+ .withRetryCondition((originalRequest, exception, retriesAttempted) -> {
+ if (PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION.shouldRetry(originalRequest, exception, retriesAttempted)) {
+ return true;
+ }
+ if (exception instanceof AmazonServiceException ase) {
+ return ase.getStatusCode() == HttpStatus.SC_FORBIDDEN && "InvalidAccessKeyId".equals(ase.getErrorCode());
+ }
+ return false;
+ })
+ .withBackoffStrategy(PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY)
+ .withMaxErrorRetry(PredefinedRetryPolicies.DEFAULT_MAX_ERROR_RETRY)
+ .withHonorMaxErrorRetryInClientConfig(true)
+ .withHonorDefaultMaxErrorRetryInRetryMode(true)
+ .withHonorDefaultBackoffStrategyInRetryMode(true)
+ .build();
}
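
The policy above composes with the SDK defaults rather than replacing them: anything PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION would retry is still retried, and on top of that a 403 whose error code is InvalidAccessKeyId becomes retryable, because a stateless deployment can see such responses transiently while rotated credentials propagate. A minimal sketch of exercising the condition directly, assuming it is placed in the org.elasticsearch.repositories.s3 package (the field is package-private):

    import com.amazonaws.AmazonWebServiceRequest;
    import com.amazonaws.services.s3.model.AmazonS3Exception;

    // Sketch only: evaluate the added retry condition outside a real client.
    class Retryable403Sketch {
        public static void main(String[] args) {
            AmazonS3Exception e = new AmazonS3Exception("auth failure");
            e.setStatusCode(403);
            e.setErrorCode("InvalidAccessKeyId");
            AmazonWebServiceRequest request = new AmazonWebServiceRequest() {
            };

            // true: retried, using the SDK's default backoff and max error retry
            System.out.println(S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(request, e, 0));

            // false: any other 403 falls through to the default condition,
            // which does not retry client errors
            e.setErrorCode("AccessDenied");
            System.out.println(S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(request, e, 0));
        }
    }
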
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java
index 0aac0ba898f97..43f606135291d 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java
@@ -17,6 +17,7 @@
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper;
+import com.amazonaws.retry.PredefinedRetryPolicies;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.util.Supplier;
@@ -211,7 +212,7 @@ private void launchAWSConfigurationTest(
) {
final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default");
- final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings);
+ final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings, false);
assertThat(configuration.getResponseMetadataCacheSize(), is(0));
assertThat(configuration.getProtocol(), is(expectedProtocol));
@@ -222,6 +223,7 @@ private void launchAWSConfigurationTest(
assertThat(configuration.getMaxErrorRetry(), is(expectedMaxRetries));
assertThat(configuration.useThrottledRetries(), is(expectedUseThrottleRetries));
assertThat(configuration.getSocketTimeout(), is(expectedReadTimeout));
+ assertThat(configuration.getRetryPolicy(), is(PredefinedRetryPolicies.DEFAULT));
}
public void testEndpointSetting() {
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java
index ddc7a1851c663..288ac1bb3c534 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java
@@ -194,9 +194,9 @@ public void testSignerOverrideCanBeSet() {
);
assertThat(settings.get("default").region, is(""));
assertThat(settings.get("other").signerOverride, is(signerOverride));
- ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default"));
+ ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default"), false);
assertThat(defaultConfiguration.getSignerOverride(), nullValue());
- ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other"));
+ ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other"), false);
assertThat(configuration.getSignerOverride(), is(signerOverride));
}
@@ -207,12 +207,18 @@ public void testMaxConnectionsCanBeSet() {
);
assertThat(settings.get("default").maxConnections, is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS));
assertThat(settings.get("other").maxConnections, is(maxConnections));
- ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default"));
+ ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default"), false);
assertThat(defaultConfiguration.getMaxConnections(), is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS));
- ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other"));
+ ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other"), false);
assertThat(configuration.getMaxConnections(), is(maxConnections));
// the default appears in the docs so let's make sure it doesn't change:
assertEquals(50, ClientConfiguration.DEFAULT_MAX_CONNECTIONS);
}
+
+ public void testStatelessDefaultRetryPolicy() {
+ final var s3ClientSettings = S3ClientSettings.load(Settings.EMPTY).get("default");
+ final var clientConfiguration = S3Service.buildConfiguration(s3ClientSettings, true);
+ assertThat(clientConfiguration.getRetryPolicy(), is(S3Service.RETRYABLE_403_RETRY_POLICY));
+ }
}
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java
index 7bfaf56127fc7..afe1bb1a03c76 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java
@@ -8,23 +8,23 @@
*/
package org.elasticsearch.repositories.s3;
+import com.amazonaws.AmazonWebServiceRequest;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService;
-import org.mockito.Mockito;
import java.io.IOException;
+import static org.mockito.Mockito.mock;
+
public class S3ServiceTests extends ESTestCase {
public void testCachedClientsAreReleased() throws IOException {
- final S3Service s3Service = new S3Service(
- Mockito.mock(Environment.class),
- Settings.EMPTY,
- Mockito.mock(ResourceWatcherService.class)
- );
+ final S3Service s3Service = new S3Service(mock(Environment.class), Settings.EMPTY, mock(ResourceWatcherService.class));
final Settings settings = Settings.builder().put("endpoint", "http://first").build();
final RepositoryMetadata metadata1 = new RepositoryMetadata("first", "s3", settings);
final RepositoryMetadata metadata2 = new RepositoryMetadata("second", "s3", settings);
@@ -41,4 +41,25 @@ public void testCachedClientsAreReleased() throws IOException {
final S3ClientSettings clientSettingsReloaded = s3Service.settings(metadata1);
assertNotSame(clientSettings, clientSettingsReloaded);
}
+
+ public void testRetryOn403RetryPolicy() {
+ final AmazonS3Exception e = new AmazonS3Exception("error");
+ e.setStatusCode(403);
+ e.setErrorCode("InvalidAccessKeyId");
+
+ // Retry on 403 invalid access key id
+ assertTrue(
+ S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9))
+ );
+
+ // Not retry if not 403 or not invalid access key id
+ if (randomBoolean()) {
+ e.setStatusCode(randomValueOtherThan(403, () -> between(0, 600)));
+ } else {
+ e.setErrorCode(randomAlphaOfLength(10));
+ }
+ assertFalse(
+ S3Service.RETRYABLE_403_RETRY_POLICY.getRetryCondition().shouldRetry(mock(AmazonWebServiceRequest.class), e, between(0, 9))
+ );
+ }
}
diff --git a/muted-tests.yml b/muted-tests.yml
index 084bf27d6a11b..3a59af6234038 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -164,9 +164,6 @@ tests:
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {categorize.Categorize}
issue: https://github.com/elastic/elasticsearch/issues/113428
-- class: org.elasticsearch.xpack.inference.InferenceCrudIT
- method: testSupportedStream
- issue: https://github.com/elastic/elasticsearch/issues/113430
- class: org.elasticsearch.integration.KibanaUserRoleIntegTests
method: testFieldMappings
issue: https://github.com/elastic/elasticsearch/issues/113592
@@ -185,18 +182,9 @@ tests:
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {categorize.Categorize SYNC}
issue: https://github.com/elastic/elasticsearch/issues/113722
-- class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT
- method: testNonGzippedDatabase
- issue: https://github.com/elastic/elasticsearch/issues/113821
-- class: org.elasticsearch.ingest.geoip.DatabaseNodeServiceIT
- method: testGzippedDatabase
- issue: https://github.com/elastic/elasticsearch/issues/113752
- class: org.elasticsearch.threadpool.SimpleThreadPoolIT
method: testThreadPoolMetrics
issue: https://github.com/elastic/elasticsearch/issues/108320
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
- method: test {p0=range/20_synthetic_source/Date range}
- issue: https://github.com/elastic/elasticsearch/issues/113874
- class: org.elasticsearch.kibana.KibanaThreadPoolIT
method: testBlockedThreadPoolsRejectUserRequests
issue: https://github.com/elastic/elasticsearch/issues/113939
@@ -276,9 +264,33 @@ tests:
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search}
issue: https://github.com/elastic/elasticsearch/issues/115600
-- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT
- method: test {yaml=indices.create/10_basic/Create lookup index}
- issue: https://github.com/elastic/elasticsearch/issues/115605
+- class: org.elasticsearch.oldrepos.OldRepositoryAccessIT
+ method: testOldRepoAccess
+ issue: https://github.com/elastic/elasticsearch/issues/115631
+- class: org.elasticsearch.xpack.search.CrossClusterAsyncSearchIT
+ method: testCCSClusterDetailsWhereAllShardsSkippedInCanMatch
+ issue: https://github.com/elastic/elasticsearch/issues/115652
+- class: org.elasticsearch.index.get.GetResultTests
+ method: testToAndFromXContent
+ issue: https://github.com/elastic/elasticsearch/issues/115688
+- class: org.elasticsearch.action.update.UpdateResponseTests
+ method: testToAndFromXContent
+ issue: https://github.com/elastic/elasticsearch/issues/115689
+- class: org.elasticsearch.xpack.shutdown.NodeShutdownIT
+ method: testStalledShardMigrationProperlyDetected
+ issue: https://github.com/elastic/elasticsearch/issues/115697
+- class: org.elasticsearch.index.get.GetResultTests
+ method: testToAndFromXContentEmbedded
+ issue: https://github.com/elastic/elasticsearch/issues/115657
+- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
+ method: testGeoShapeGeoHash
+ issue: https://github.com/elastic/elasticsearch/issues/115664
+- class: org.elasticsearch.xpack.inference.InferenceCrudIT
+ method: testSupportedStream
+ issue: https://github.com/elastic/elasticsearch/issues/113430
+- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
+ method: testGeoShapeGeoTile
+ issue: https://github.com/elastic/elasticsearch/issues/115717
# Examples:
#
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
index ba79de4ab6cd1..8c369ebc9950d 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
@@ -171,7 +171,7 @@ public void testLogsIndexing() throws IOException {
}
}
- private static void enableLogsdbByDefault() throws IOException {
+ static void enableLogsdbByDefault() throws IOException {
var request = new Request("PUT", "/_cluster/settings");
request.setJsonEntity("""
{
@@ -214,7 +214,7 @@ private static Request rolloverDataStream(final RestClient client, final String
}
@SuppressWarnings("unchecked")
- private static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException {
+ static String getWriteBackingIndex(final RestClient client, final String dataStreamName, int backingIndex) throws IOException {
final Request request = new Request("GET", "_data_stream/" + dataStreamName);
final List<Object> dataStreams = (List<Object>) entityAsMap(client.performRequest(request)).get("data_streams");
final Map<String, Object> dataStream = (Map<String, Object>) dataStreams.get(0);
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java
new file mode 100644
index 0000000000000..9bdc43543e331
--- /dev/null
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsdbIndexingRollingUpgradeIT.java
@@ -0,0 +1,253 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.upgrades;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.common.network.NetworkAddress;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.test.rest.ObjectPath;
+import org.elasticsearch.xcontent.XContentType;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.time.Instant;
+import java.util.Map;
+
+import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.enableLogsdbByDefault;
+import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex;
+import static org.elasticsearch.upgrades.TsdbIT.formatInstant;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class LogsdbIndexingRollingUpgradeIT extends AbstractRollingUpgradeTestCase {
+
+ static String BULK_ITEM_TEMPLATE =
+ """
+ {"@timestamp": "$now", "host.name": "$host", "method": "$method", "ip": "$ip", "message": "$message", "length": $length, "factor": $factor}
+ """;
+
+ private static final String TEMPLATE = """
+ {
+ "mappings": {
+ "properties": {
+ "@timestamp" : {
+ "type": "date"
+ },
+ "method": {
+ "type": "keyword"
+ },
+ "message": {
+ "type": "text"
+ },
+ "ip": {
+ "type": "ip"
+ },
+ "length": {
+ "type": "long"
+ },
+ "factor": {
+ "type": "double"
+ }
+ }
+ }
+ }""";
+
+ public LogsdbIndexingRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) {
+ super(upgradedNodes);
+ }
+
+ public void testIndexing() throws Exception {
+ String dataStreamName = "logs-bwc-test";
+ if (isOldCluster()) {
+ startTrial();
+ enableLogsdbByDefault();
+ createTemplate(dataStreamName, "3", TEMPLATE);
+
+ Instant startTime = Instant.now().minusSeconds(60 * 60);
+ bulkIndex(dataStreamName, 4, 1024, startTime);
+
+ String firstBackingIndex = getWriteBackingIndex(client(), dataStreamName, 0);
+ var settings = (Map<?, ?>) getIndexSettingsWithDefaults(firstBackingIndex).get(firstBackingIndex);
+ assertThat(((Map<?, ?>) settings.get("settings")).get("index.mode"), equalTo("logsdb"));
+ assertThat(((Map<?, ?>) settings.get("defaults")).get("index.mapping.source.mode"), equalTo("SYNTHETIC"));
+
+ ensureGreen(dataStreamName);
+ search(dataStreamName);
+ query(dataStreamName);
+ } else if (isMixedCluster()) {
+ Instant startTime = Instant.now().minusSeconds(60 * 30);
+ bulkIndex(dataStreamName, 4, 1024, startTime);
+
+ ensureGreen(dataStreamName);
+ search(dataStreamName);
+ query(dataStreamName);
+ } else if (isUpgradedCluster()) {
+ ensureGreen(dataStreamName);
+ Instant startTime = Instant.now();
+ bulkIndex(dataStreamName, 4, 1024, startTime);
+ search(dataStreamName);
+ query(dataStreamName);
+
+ var forceMergeRequest = new Request("POST", "/" + dataStreamName + "/_forcemerge");
+ forceMergeRequest.addParameter("max_num_segments", "1");
+ assertOK(client().performRequest(forceMergeRequest));
+
+ ensureGreen(dataStreamName);
+ search(dataStreamName);
+ query(dataStreamName);
+ }
+ }
+
+ static void createTemplate(String dataStreamName, String id, String template) throws IOException {
+ final String INDEX_TEMPLATE = """
+ {
+ "index_patterns": ["$DATASTREAM"],
+ "template": $TEMPLATE,
+ "data_stream": {
+ }
+ }""";
+ var putIndexTemplateRequest = new Request("POST", "/_index_template/" + id);
+ putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$TEMPLATE", template).replace("$DATASTREAM", dataStreamName));
+ assertOK(client().performRequest(putIndexTemplateRequest));
+ }
+
+ static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception {
+ for (int i = 0; i < numRequest; i++) {
+ var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk");
+ StringBuilder requestBody = new StringBuilder();
+ for (int j = 0; j < numDocs; j++) {
+ String hostName = "host" + j % 50; // Not realistic, but makes asserting search / query response easier.
+ String methodName = "method" + j % 5;
+ String ip = NetworkAddress.format(randomIp(true));
+ String message = randomAlphaOfLength(128);
+ long length = randomLong();
+ double factor = randomDouble();
+
+ requestBody.append("{\"create\": {}}");
+ requestBody.append('\n');
+ requestBody.append(
+ BULK_ITEM_TEMPLATE.replace("$now", formatInstant(startTime))
+ .replace("$host", hostName)
+ .replace("$method", methodName)
+ .replace("$ip", ip)
+ .replace("$message", message)
+ .replace("$length", Long.toString(length))
+ .replace("$factor", Double.toString(factor))
+ );
+ requestBody.append('\n');
+
+ startTime = startTime.plusMillis(1);
+ }
+ bulkRequest.setJsonEntity(requestBody.toString());
+ bulkRequest.addParameter("refresh", "true");
+ var response = client().performRequest(bulkRequest);
+ assertOK(response);
+ var responseBody = entityAsMap(response);
+ assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false));
+ }
+ }
+
+ void search(String dataStreamName) throws Exception {
+ var searchRequest = new Request("POST", "/" + dataStreamName + "/_search");
+ searchRequest.addParameter("pretty", "true");
+ searchRequest.setJsonEntity("""
+ {
+ "size": 0,
+ "aggs": {
+ "host_name": {
+ "terms": {
+ "field": "host.name",
+ "order": { "_key": "asc" }
+ },
+ "aggs": {
+ "max_length": {
+ "max": {
+ "field": "length"
+ }
+ },
+ "max_factor": {
+ "max": {
+ "field": "factor"
+ }
+ }
+ }
+ }
+ }
+ }
+ """);
+ var response = client().performRequest(searchRequest);
+ assertOK(response);
+ var responseBody = entityAsMap(response);
+
+ Integer totalCount = ObjectPath.evaluate(responseBody, "hits.total.value");
+ assertThat(totalCount, greaterThanOrEqualTo(4096));
+ String key = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.key");
+ assertThat(key, equalTo("host0"));
+ Integer docCount = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.doc_count");
+ assertThat(docCount, greaterThan(0));
+ Double maxTx = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.max_length.value");
+ assertThat(maxTx, notNullValue());
+ Double maxRx = ObjectPath.evaluate(responseBody, "aggregations.host_name.buckets.0.max_factor.value");
+ assertThat(maxRx, notNullValue());
+ }
+
+ void query(String dataStreamName) throws Exception {
+ var queryRequest = new Request("POST", "/_query");
+ queryRequest.addParameter("pretty", "true");
+ queryRequest.setJsonEntity("""
+ {
+ "query": "FROM $ds | STATS max(length), max(factor) BY host.name | SORT host.name | LIMIT 5"
+ }
+ """.replace("$ds", dataStreamName));
+ var response = client().performRequest(queryRequest);
+ assertOK(response);
+ var responseBody = entityAsMap(response);
+
+ String column1 = ObjectPath.evaluate(responseBody, "columns.0.name");
+ String column2 = ObjectPath.evaluate(responseBody, "columns.1.name");
+ String column3 = ObjectPath.evaluate(responseBody, "columns.2.name");
+ assertThat(column1, equalTo("max(length)"));
+ assertThat(column2, equalTo("max(factor)"));
+ assertThat(column3, equalTo("host.name"));
+
+ String key = ObjectPath.evaluate(responseBody, "values.0.2");
+ assertThat(key, equalTo("host0"));
+ Long maxRx = ObjectPath.evaluate(responseBody, "values.0.0");
+ assertThat(maxRx, notNullValue());
+ Double maxTx = ObjectPath.evaluate(responseBody, "values.0.1");
+ assertThat(maxTx, notNullValue());
+ }
+
+ protected static void startTrial() throws IOException {
+ Request startTrial = new Request("POST", "/_license/start_trial");
+ startTrial.addParameter("acknowledge", "true");
+ assertOK(client().performRequest(startTrial));
+ }
+
+ static Map<String, Object> getIndexSettingsWithDefaults(String index) throws IOException {
+ Request request = new Request("GET", "/" + index + "/_settings");
+ request.addParameter("flat_settings", "true");
+ request.addParameter("include_defaults", "true");
+ Response response = client().performRequest(request);
+ try (InputStream is = response.getEntity().getContent()) {
+ return XContentHelper.convertToMap(
+ XContentType.fromMediaType(response.getEntity().getContentType().getValue()).xContent(),
+ is,
+ true
+ );
+ }
+ }
+
+}
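
For reference, bulkIndex builds each request body by pairing a create action line with one rendered instance of BULK_ITEM_TEMPLATE per document. A sketch of a single item, with illustrative values:

    // Sketch: one bulk item as rendered inside bulkIndex(...) above.
    String item = BULK_ITEM_TEMPLATE.replace("$now", "2024-01-01T00:00:00.000Z")
        .replace("$host", "host0")
        .replace("$method", "method0")
        .replace("$ip", "10.0.0.1")
        .replace("$message", "some log line")
        .replace("$length", "42")
        .replace("$factor", "0.5");
    // Per document the body then contains:
    //   {"create": {}}
    //   {"@timestamp": "2024-01-01T00:00:00.000Z", "host.name": "host0", "method": "method0",
    //    "ip": "10.0.0.1", "message": "some log line", "length": 42, "factor": 0.5}

Because host names cycle through a fixed set of 50 values, the terms aggregation in search() and the STATS ... BY host.name in query() can deterministically assert on the first bucket being "host0".
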
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
index 9e3030d510266..6744c84f29d0f 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
@@ -33,7 +33,7 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) {
super(upgradedNodes);
}
- private static final String TEMPLATE = """
+ static final String TEMPLATE = """
{
"settings":{
"index": {
@@ -289,7 +289,7 @@ private static void assertSearch(String dataStreamName, int expectedHitCount) th
assertThat(ObjectPath.evaluate(responseBody, "hits.total.value"), equalTo(expectedHitCount));
}
- private static String formatInstant(Instant instant) {
+ static String formatInstant(Instant instant) {
return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant);
}
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java
new file mode 100644
index 0000000000000..1ac919ea57001
--- /dev/null
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIndexingRollingUpgradeIT.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.upgrades;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.common.network.NetworkAddress;
+import org.elasticsearch.test.rest.ObjectPath;
+
+import java.time.Instant;
+import java.util.Map;
+
+import static org.elasticsearch.upgrades.LogsIndexModeRollingUpgradeIT.getWriteBackingIndex;
+import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.createTemplate;
+import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.getIndexSettingsWithDefaults;
+import static org.elasticsearch.upgrades.LogsdbIndexingRollingUpgradeIT.startTrial;
+import static org.elasticsearch.upgrades.TsdbIT.TEMPLATE;
+import static org.elasticsearch.upgrades.TsdbIT.formatInstant;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class TsdbIndexingRollingUpgradeIT extends AbstractRollingUpgradeTestCase {
+
+ static String BULK_ITEM_TEMPLATE =
+ """
+ {"@timestamp": "$now", "metricset": "pod", "k8s": {"pod": {"name": "$name", "uid":"$uid", "ip": "$ip", "network": {"tx": $tx, "rx": $rx}}}}
+ """;
+
+ public TsdbIndexingRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) {
+ super(upgradedNodes);
+ }
+
+ public void testIndexing() throws Exception {
+ String dataStreamName = "k9s";
+ if (isOldCluster()) {
+ startTrial();
+ createTemplate(dataStreamName, "2", TEMPLATE);
+
+ Instant startTime = Instant.now().minusSeconds(60 * 60);
+ bulkIndex(dataStreamName, 4, 1024, startTime);
+
+ String firstBackingIndex = getWriteBackingIndex(client(), dataStreamName, 0);
+ var settings = (Map<?, ?>) getIndexSettingsWithDefaults(firstBackingIndex).get(firstBackingIndex);
+ assertThat(((Map<?, ?>) settings.get("settings")).get("index.mode"), equalTo("time_series"));
+ assertThat(((Map<?, ?>) settings.get("defaults")).get("index.mapping.source.mode"), equalTo("SYNTHETIC"));
+
+ ensureGreen(dataStreamName);
+ search(dataStreamName);
+ query(dataStreamName);
+ } else if (isMixedCluster()) {
+ Instant startTime = Instant.now().minusSeconds(60 * 30);
+ bulkIndex(dataStreamName, 4, 1024, startTime);
+
+ ensureGreen(dataStreamName);
+ search(dataStreamName);
+ query(dataStreamName);
+ } else if (isUpgradedCluster()) {
+ ensureGreen(dataStreamName);
+ Instant startTime = Instant.now();
+ bulkIndex(dataStreamName, 4, 1024, startTime);
+ search(dataStreamName);
+ query(dataStreamName);
+
+ var forceMergeRequest = new Request("POST", "/" + dataStreamName + "/_forcemerge");
+ forceMergeRequest.addParameter("max_num_segments", "1");
+ assertOK(client().performRequest(forceMergeRequest));
+
+ ensureGreen(dataStreamName);
+ search(dataStreamName);
+ query(dataStreamName);
+ }
+ }
+
+ static void bulkIndex(String dataStreamName, int numRequest, int numDocs, Instant startTime) throws Exception {
+ for (int i = 0; i < numRequest; i++) {
+ var bulkRequest = new Request("POST", "/" + dataStreamName + "/_bulk");
+ StringBuilder requestBody = new StringBuilder();
+ for (int j = 0; j < numDocs; j++) {
+ String podName = "pod" + j % 5; // Not realistic, but makes asserting search / query response easier.
+ String podUid = randomUUID();
+ String podIp = NetworkAddress.format(randomIp(true));
+ long podTx = randomLong();
+ long podRx = randomLong();
+
+ requestBody.append("{\"create\": {}}");
+ requestBody.append('\n');
+ requestBody.append(
+ BULK_ITEM_TEMPLATE.replace("$now", formatInstant(startTime))
+ .replace("$name", podName)
+ .replace("$uid", podUid)
+ .replace("$ip", podIp)
+ .replace("$tx", Long.toString(podTx))
+ .replace("$rx", Long.toString(podRx))
+ );
+ requestBody.append('\n');
+
+ startTime = startTime.plusMillis(1);
+ }
+ bulkRequest.setJsonEntity(requestBody.toString());
+ bulkRequest.addParameter("refresh", "true");
+ var response = client().performRequest(bulkRequest);
+ assertOK(response);
+ var responseBody = entityAsMap(response);
+ assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false));
+ }
+ }
+
+ void search(String dataStreamName) throws Exception {
+ var searchRequest = new Request("POST", "/" + dataStreamName + "/_search");
+ searchRequest.addParameter("pretty", "true");
+ searchRequest.setJsonEntity("""
+ {
+ "size": 0,
+ "aggs": {
+ "pod_name": {
+ "terms": {
+ "field": "k8s.pod.name",
+ "order": { "_key": "asc" }
+ },
+ "aggs": {
+ "max_tx": {
+ "max": {
+ "field": "k8s.pod.network.tx"
+ }
+ },
+ "max_rx": {
+ "max": {
+ "field": "k8s.pod.network.rx"
+ }
+ }
+ }
+ }
+ }
+ }
+ """);
+ var response = client().performRequest(searchRequest);
+ assertOK(response);
+ var responseBody = entityAsMap(response);
+
+ Integer totalCount = ObjectPath.evaluate(responseBody, "hits.total.value");
+ assertThat(totalCount, greaterThanOrEqualTo(4096));
+ String key = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.key");
+ assertThat(key, equalTo("pod0"));
+ Integer docCount = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.doc_count");
+ assertThat(docCount, greaterThan(0));
+ Double maxTx = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.max_tx.value");
+ assertThat(maxTx, notNullValue());
+ Double maxRx = ObjectPath.evaluate(responseBody, "aggregations.pod_name.buckets.0.max_rx.value");
+ assertThat(maxRx, notNullValue());
+ }
+
+ void query(String dataStreamName) throws Exception {
+ var queryRequest = new Request("POST", "/_query");
+ queryRequest.addParameter("pretty", "true");
+ queryRequest.setJsonEntity("""
+ {
+ "query": "FROM $ds | STATS max(k8s.pod.network.rx), max(k8s.pod.network.tx) BY k8s.pod.name | SORT k8s.pod.name | LIMIT 5"
+ }
+ """.replace("$ds", dataStreamName));
+ var response = client().performRequest(queryRequest);
+ assertOK(response);
+ var responseBody = entityAsMap(response);
+
+ String column1 = ObjectPath.evaluate(responseBody, "columns.0.name");
+ String column2 = ObjectPath.evaluate(responseBody, "columns.1.name");
+ String column3 = ObjectPath.evaluate(responseBody, "columns.2.name");
+ assertThat(column1, equalTo("max(k8s.pod.network.rx)"));
+ assertThat(column2, equalTo("max(k8s.pod.network.tx)"));
+ assertThat(column3, equalTo("k8s.pod.name"));
+
+ String key = ObjectPath.evaluate(responseBody, "values.0.2");
+ assertThat(key, equalTo("pod0"));
+ Long maxRx = ObjectPath.evaluate(responseBody, "values.0.0");
+ assertThat(maxRx, notNullValue());
+ Long maxTx = ObjectPath.evaluate(responseBody, "values.0.1");
+ assertThat(maxTx, notNullValue());
+ }
+
+}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml
index cc92b52e0887a..de20f82f8ba2f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml
@@ -525,140 +525,6 @@ setup:
_source:
ip_range: { "gte": "2001:db8::", "lte": null }
----
-"Date range":
- - skip:
- cluster_features: ["mapper.range.date_range_indexing_fix"]
- reason: "tests prior to rounding fixes in 8.16.0 that caused non-intuitive indexing and query because ranges were assumed to always index with 0's as the default such as when time is missing 00:00:00.000 time was assumed but for lte indexing and query missing time should be 23:59:59.999 as per docs here: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-range-query.html"
-
- - do:
- index:
- index: synthetic_source_test
- id: "1"
- body: { "date_range" : { "gte": "2017-09-01", "lte": "2017-09-05" } }
-
- - do:
- index:
- index: synthetic_source_test
- id: "2"
- body: { "date_range" : { "gt": "2017-09-01", "lte": "2017-09-03" } }
-
- - do:
- index:
- index: synthetic_source_test
- id: "3"
- body: { "date_range" : [ { "gte": "2017-09-04", "lt": "2017-09-05" } ] }
-
- - do:
- index:
- index: synthetic_source_test
- id: "4"
- body: { "date_range" : [ { "gt": "2017-09-04", "lt": "2017-09-08" }, { "gt": "2017-09-04", "lt": "2017-09-07" } ] }
-
- - do:
- index:
- index: synthetic_source_test
- id: "5"
- body: { "date_range" : { "gte": 1504224000000, "lte": 1504569600000 } }
-
- - do:
- index:
- index: synthetic_source_test
- id: "6"
- body: { "date_range" : { "gte": "2017-09-01T10:20:30.123Z", "lte": "2017-09-05T03:04:05.789Z" } }
-
- - do:
- index:
- index: synthetic_source_test
- id: "7"
- body: { "date_range" : null }
-
- - do:
- index:
- index: synthetic_source_test
- id: "8"
- body: { "date_range": { "gte": null, "lte": "2017-09-05" } }
-
- - do:
- index:
- index: synthetic_source_test
- id: "9"
- body: { "date_range": { "gte": "2017-09-05" } }
-
- - do:
- indices.refresh: {}
-
- - do:
- get:
- index: synthetic_source_test
- id: "1"
- - match:
- _source:
- date_range: { "gte": "2017-09-01T00:00:00.000Z", "lte": "2017-09-05T00:00:00.000Z" }
-
- - do:
- get:
- index: synthetic_source_test
- id: "2"
- - match:
- _source:
- date_range: { "gte": "2017-09-01T00:00:00.001Z", "lte": "2017-09-03T00:00:00.000Z" }
-
- - do:
- get:
- index: synthetic_source_test
- id: "3"
- - match:
- _source:
- date_range: { "gte": "2017-09-04T00:00:00.000Z", "lte": "2017-09-04T23:59:59.999Z" }
-
- - do:
- get:
- index: synthetic_source_test
- id: "4"
- - match:
- _source:
- date_range: [ { "gte": "2017-09-04T00:00:00.001Z", "lte": "2017-09-06T23:59:59.999Z" }, { "gte": "2017-09-04T00:00:00.001Z", "lte": "2017-09-07T23:59:59.999Z" } ]
-
- - do:
- get:
- index: synthetic_source_test
- id: "5"
- - match:
- _source:
- date_range: { "gte": "2017-09-01T00:00:00.000Z", "lte": "2017-09-05T00:00:00.000Z" }
-
- - do:
- get:
- index: synthetic_source_test
- id: "6"
- - match:
- _source:
- date_range: { "gte": "2017-09-01T10:20:30.123Z", "lte": "2017-09-05T03:04:05.789Z" }
-
- - do:
- get:
- index: synthetic_source_test
- id: "7"
- - match:
- _source: {}
-
- - do:
- get:
- index: synthetic_source_test
- id: "8"
- - match:
- _source:
- date_range: { "gte": null, "lte": "2017-09-05T00:00:00.000Z" }
-
- - do:
- get:
- index: synthetic_source_test
- id: "9"
- - match:
- _source:
- date_range: { "gte": "2017-09-05T00:00:00.000Z", "lte": null }
-
---
"Date range Rounding Fixes":
- requires:
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java
index 20c10c3d8c1f9..8c80cee58f46c 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java
@@ -10,7 +10,7 @@
import org.apache.logging.log4j.Level;
import org.apache.lucene.util.Constants;
-import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse;
@@ -26,15 +26,14 @@
import org.hamcrest.Matcher;
import java.util.Map;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.ExecutionException;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
@@ -44,11 +43,10 @@
public class HotThreadsIT extends ESIntegTestCase {
- public void testHotThreadsDontFail() throws InterruptedException {
+ public void testHotThreadsDontFail() throws InterruptedException, ExecutionException {
// This test just checks if nothing crashes or gets stuck etc.
createIndex("test");
final int iters = scaledRandomIntBetween(2, 20);
- final AtomicBoolean hasErrors = new AtomicBoolean(false);
for (int i = 0; i < iters; i++) {
final NodesHotThreadsRequest request = new NodesHotThreadsRequest(
Strings.EMPTY_ARRAY,
@@ -67,36 +65,7 @@ public void testHotThreadsDontFail() throws InterruptedException {
randomBoolean()
)
);
- final CountDownLatch latch = new CountDownLatch(1);
- client().execute(TransportNodesHotThreadsAction.TYPE, request, new ActionListener<>() {
- @Override
- public void onResponse(NodesHotThreadsResponse nodeHotThreads) {
- boolean success = false;
- try {
- assertThat(nodeHotThreads, notNullValue());
- Map<String, NodeHotThreads> nodesMap = nodeHotThreads.getNodesMap();
- assertThat(nodeHotThreads.failures(), empty());
- assertThat(nodesMap.size(), equalTo(cluster().size()));
- for (NodeHotThreads ht : nodeHotThreads.getNodes()) {
- assertNotNull(ht.getHotThreads());
- }
- success = true;
- } finally {
- if (success == false) {
- hasErrors.set(true);
- }
- latch.countDown();
- }
- }
-
- @Override
- public void onFailure(Exception e) {
- logger.error("FAILED", e);
- hasErrors.set(true);
- latch.countDown();
- fail();
- }
- });
+ final ActionFuture<NodesHotThreadsResponse> hotThreadsFuture = client().execute(TransportNodesHotThreadsAction.TYPE, request);
indexRandom(
true,
@@ -105,7 +74,7 @@ public void onFailure(Exception e) {
prepareIndex("test").setId("3").setSource("field1", "value3")
);
ensureSearchable();
- while (latch.getCount() > 0) {
+ while (hotThreadsFuture.isDone() == false) {
assertHitCount(
prepareSearch().setQuery(matchAllQuery())
.setPostFilter(
@@ -115,8 +84,15 @@ public void onFailure(Exception e) {
3L
);
}
- safeAwait(latch);
- assertThat(hasErrors.get(), is(false));
+ assertResponse(hotThreadsFuture, nodeHotThreads -> {
+ assertThat(nodeHotThreads, notNullValue());
+ Map<String, NodeHotThreads> nodesMap = nodeHotThreads.getNodesMap();
+ assertThat(nodeHotThreads.failures(), empty());
+ assertThat(nodesMap.size(), equalTo(cluster().size()));
+ for (NodeHotThreads ht : nodeHotThreads.getNodes()) {
+ assertNotNull(ht.getHotThreads());
+ }
+ });
}
}
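
The rewrite trades the listener, latch, and error flag for a single ActionFuture: the request is dispatched once, the test keeps the cluster busy while the future is pending, and every assertion runs in one place after completion. Condensed, the new shape is (runSomeSearches is a hypothetical stand-in for the indexing-and-search loop):

    // Sketch: dispatch async, work while pending, assert once done.
    ActionFuture<NodesHotThreadsResponse> future = client().execute(TransportNodesHotThreadsAction.TYPE, request);
    while (future.isDone() == false) {
        runSomeSearches(); // hypothetical: give hot threads something to observe
    }
    assertResponse(future, response -> assertThat(response.failures(), empty()));
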
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java
index f11c145f71f23..f70667b91aec8 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorIT.java
@@ -9,7 +9,6 @@
package org.elasticsearch.plugins.internal;
-import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.EngineFactory;
@@ -126,7 +125,7 @@ public TestDocumentParsingProviderPlugin() {}
public DocumentParsingProvider getDocumentParsingProvider() {
return new DocumentParsingProvider() {
@Override
- public XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest<?> request) {
+ public XContentMeteringParserDecorator newMeteringParserDecorator(IndexRequest request) {
return new TestXContentMeteringParserDecorator(0L);
}
@@ -152,8 +151,8 @@ public TestDocumentSizeReporter(String indexName) {
@Override
public void onIndexingCompleted(ParsedDocument parsedDocument) {
- long delta = parsedDocument.getNormalizedSize().ingestedBytes();
- if (delta > 0) {
+ long delta = parsedDocument.getNormalizedSize();
+ if (delta > XContentMeteringParserDecorator.UNKNOWN_SIZE) {
COUNTER.addAndGet(delta);
}
assertThat(indexName, equalTo(TEST_INDEX_NAME));
@@ -181,8 +180,8 @@ public Token nextToken() throws IOException {
}
@Override
- public ParsedDocument.DocumentSize meteredDocumentSize() {
- return new ParsedDocument.DocumentSize(counter, counter);
+ public long meteredDocumentSize() {
+ return counter;
}
}
}
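
With meteredDocumentSize() now returning a plain long, XContentMeteringParserDecorator.UNKNOWN_SIZE serves as the "nothing measured" sentinel, which is why the reporter above only accumulates when delta is greater than it. A minimal sketch of a decorator under that contract (counting logic hypothetical, and assuming the decorate method declared by XContentParserDecorator):

    // Sketch: a metering decorator that reports whatever it managed to count.
    class CountingDecorator implements XContentMeteringParserDecorator {
        private long counted = UNKNOWN_SIZE; // sentinel until something is measured

        @Override
        public XContentParser decorate(XContentParser parser) {
            return parser; // a real implementation would wrap the parser and count as it reads
        }

        @Override
        public long meteredDocumentSize() {
            return counted;
        }
    }
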
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 25bb792d827a9..3986ea4b97254 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -182,6 +182,7 @@ static TransportVersion def(int id) {
public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_00_0);
public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_00_0);
public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_00_0);
+ public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_00_0);
/*
* STOP! READ THIS FIRST! No, really,
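
INDEX_REQUEST_REMOVE_METERING gives both ends of a connection a named cut-over point: on or after it, the metering fields are simply absent from the stream; before it, IndexRequest keeps the wire format byte-compatible by discarding the fields on read and emitting placeholders on write, as the next hunks show. The symmetry, condensed into a hypothetical pair of helpers (the real code additionally nests the pre-8.13 and per-field version checks):

    // Sketch: read and write must stay mirror images per transport version.
    void readCompat(StreamInput in) throws IOException {
        if (in.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) {
            in.readZLong(); // old peer still sends normalisedBytesParsed; discard it
        }
    }

    void writeCompat(StreamOutput out) throws IOException {
        if (out.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) {
            out.writeZLong(-1); // old peer still expects it; send the "unset" placeholder
        }
    }
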
diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java
index b98f5d87ee232..d0785a60dd0f5 100644
--- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java
@@ -146,9 +146,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
* rawTimestamp field is used on the coordinate node, it doesn't need to be serialised.
*/
private Object rawTimestamp;
- private long normalisedBytesParsed = -1;
- private boolean originatesFromUpdateByScript;
- private boolean originatesFromUpdateByDoc;
public IndexRequest(StreamInput in) throws IOException {
this(null, in);
@@ -183,7 +180,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio
dynamicTemplates = in.readMap(StreamInput::readString);
if (in.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED)
&& in.getTransportVersion().before(TransportVersions.V_8_13_0)) {
- in.readBoolean();
+ in.readBoolean(); // obsolete, prior to tracking normalisedBytesParsed
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
this.listExecutedPipelines = in.readBoolean();
@@ -196,21 +193,20 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
requireDataStream = in.readBoolean();
- normalisedBytesParsed = in.readZLong();
} else {
requireDataStream = false;
}
- if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) {
- originatesFromUpdateByScript = in.readBoolean();
- } else {
- originatesFromUpdateByScript = false;
- }
-
- if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) {
- originatesFromUpdateByDoc = in.readBoolean();
- } else {
- originatesFromUpdateByDoc = false;
+ if (in.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) {
+ if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
+ in.readZLong(); // obsolete normalisedBytesParsed
+ }
+ if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) {
+ in.readBoolean(); // obsolete originatesFromUpdateByScript
+ }
+ if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) {
+ in.readBoolean(); // obsolete originatesFromUpdateByDoc
+ }
}
}
@@ -759,7 +755,7 @@ private void writeBody(StreamOutput out) throws IOException {
out.writeMap(dynamicTemplates, StreamOutput::writeString);
if (out.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED)
&& out.getTransportVersion().before(TransportVersions.V_8_13_0)) {
- out.writeBoolean(normalisedBytesParsed != -1L);
+ out.writeBoolean(false); // obsolete, prior to tracking normalisedBytesParsed
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
out.writeBoolean(listExecutedPipelines);
@@ -770,15 +766,18 @@ private void writeBody(StreamOutput out) throws IOException {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
out.writeBoolean(requireDataStream);
- out.writeZLong(normalisedBytesParsed);
- }
-
- if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) {
- out.writeBoolean(originatesFromUpdateByScript);
}
- if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) {
- out.writeBoolean(originatesFromUpdateByDoc);
+ if (out.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) {
+ if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
+ out.writeZLong(-1); // obsolete normalisedBytesParsed
+ }
+ if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_SCRIPT_ORIGIN)) {
+ out.writeBoolean(false); // obsolete originatesFromUpdateByScript
+ }
+ if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_REQUEST_UPDATE_BY_DOC_ORIGIN)) {
+ out.writeBoolean(false); // obsolete originatesFromUpdateByDoc
+ }
}
}
@@ -928,24 +927,6 @@ public void setRawTimestamp(Object rawTimestamp) {
this.rawTimestamp = rawTimestamp;
}
- /**
- * Returns a number of bytes observed when parsing a document in earlier stages of ingestion (like update/ingest service)
- * Defaults to -1 when a document size was not observed in earlier stages.
- * @return a number of bytes observed
- */
- public long getNormalisedBytesParsed() {
- return normalisedBytesParsed;
- }
-
- /**
- * Sets number of bytes observed by a DocumentSizeObserver
- * @return an index request
- */
- public IndexRequest setNormalisedBytesParsed(long normalisedBytesParsed) {
- this.normalisedBytesParsed = normalisedBytesParsed;
- return this;
- }
-
/**
* Adds the pipeline to the list of executed pipelines, if listExecutedPipelines is true
*
@@ -976,22 +957,4 @@ public List<String> getExecutedPipelines() {
return Collections.unmodifiableList(executedPipelines);
}
}
-
- public IndexRequest setOriginatesFromUpdateByScript(boolean originatesFromUpdateByScript) {
- this.originatesFromUpdateByScript = originatesFromUpdateByScript;
- return this;
- }
-
- public boolean originatesFromUpdateByScript() {
- return originatesFromUpdateByScript;
- }
-
- public boolean originatesFromUpdateByDoc() {
- return originatesFromUpdateByDoc;
- }
-
- public IndexRequest setOriginatesFromUpdateByDoc(boolean originatesFromUpdateByDoc) {
- this.originatesFromUpdateByDoc = originatesFromUpdateByDoc;
- return this;
- }
}
diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index 0c585c705dcd0..cf25c5730d341 100644
--- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -20,14 +20,13 @@
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider;
+import org.elasticsearch.action.support.SubscribableListener;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.AtomicArray;
-import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.shard.ShardId;
@@ -43,7 +42,6 @@
import org.elasticsearch.tasks.TaskCancelledException;
import org.elasticsearch.transport.Transport;
-import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -51,9 +49,12 @@
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
+import java.util.concurrent.LinkedTransferQueue;
+import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
+import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.elasticsearch.core.Strings.format;
@@ -238,7 +239,12 @@ public final void run() {
assert shardRoutings.skip() == false;
assert shardIndexMap.containsKey(shardRoutings);
int shardIndex = shardIndexMap.get(shardRoutings);
- performPhaseOnShard(shardIndex, shardRoutings, shardRoutings.nextOrNull());
+ final SearchShardTarget routing = shardRoutings.nextOrNull();
+ if (routing == null) {
+ failOnUnavailable(shardIndex, shardRoutings);
+ } else {
+ performPhaseOnShard(shardIndex, shardRoutings, routing);
+ }
}
}
}
@@ -258,7 +264,7 @@ private static boolean assertExecuteOnStartThread() {
int index = 0;
assert stackTraceElements[index++].getMethodName().equals("getStackTrace");
assert stackTraceElements[index++].getMethodName().equals("assertExecuteOnStartThread");
- assert stackTraceElements[index++].getMethodName().equals("performPhaseOnShard");
+ assert stackTraceElements[index++].getMethodName().equals("failOnUnavailable");
if (stackTraceElements[index].getMethodName().equals("performPhaseOnShard")) {
assert stackTraceElements[index].getClassName().endsWith("CanMatchPreFilterSearchPhase");
index++;
@@ -277,65 +283,53 @@ private static boolean assertExecuteOnStartThread() {
}
protected void performPhaseOnShard(final int shardIndex, final SearchShardIterator shardIt, final SearchShardTarget shard) {
- /*
- * We capture the thread that this phase is starting on. When we are called back after executing the phase, we are either on the
- * same thread (because we never went async, or the same thread was selected from the thread pool) or a different thread. If we
- * continue on the same thread in the case that we never went async and this happens repeatedly we will end up recursing deeply and
- * could stack overflow. To prevent this, we fork if we are called back on the same thread that execution started on and otherwise
- * we can continue (cf. InitialSearchPhase#maybeFork).
- */
- if (shard == null) {
- assert assertExecuteOnStartThread();
- SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias());
- onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
+ if (throttleConcurrentRequests) {
+ var pendingExecutions = pendingExecutionsPerNode.computeIfAbsent(
+ shard.getNodeId(),
+ n -> new PendingExecutions(maxConcurrentRequestsPerNode)
+ );
+ pendingExecutions.submit(l -> doPerformPhaseOnShard(shardIndex, shardIt, shard, l));
} else {
- final PendingExecutions pendingExecutions = throttleConcurrentRequests
- ? pendingExecutionsPerNode.computeIfAbsent(shard.getNodeId(), n -> new PendingExecutions(maxConcurrentRequestsPerNode))
- : null;
- Runnable r = () -> {
- final Thread thread = Thread.currentThread();
- try {
- executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) {
- @Override
- public void innerOnResponse(Result result) {
- try {
- onShardResult(result, shardIt);
- } catch (Exception exc) {
- onShardFailure(shardIndex, shard, shardIt, exc);
- } finally {
- executeNext(pendingExecutions, thread);
- }
- }
+ doPerformPhaseOnShard(shardIndex, shardIt, shard, () -> {});
+ }
+ }
- @Override
- public void onFailure(Exception t) {
- try {
- onShardFailure(shardIndex, shard, shardIt, t);
- } finally {
- executeNext(pendingExecutions, thread);
- }
- }
- });
- } catch (final Exception e) {
- try {
- /*
- * It is possible to run into connection exceptions here because we are getting the connection early and might
- * run into nodes that are not connected. In this case, on shard failure will move us to the next shard copy.
- */
- fork(() -> onShardFailure(shardIndex, shard, shardIt, e));
- } finally {
- executeNext(pendingExecutions, thread);
+ private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt, SearchShardTarget shard, Releasable releasable) {
+ try {
+ executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) {
+ @Override
+ public void innerOnResponse(Result result) {
+ try (releasable) {
+ onShardResult(result, shardIt);
+ } catch (Exception exc) {
+ onShardFailure(shardIndex, shard, shardIt, exc);
}
}
- };
- if (throttleConcurrentRequests) {
- pendingExecutions.tryRun(r);
- } else {
- r.run();
+
+ @Override
+ public void onFailure(Exception e) {
+ try (releasable) {
+ onShardFailure(shardIndex, shard, shardIt, e);
+ }
+ }
+ });
+ } catch (final Exception e) {
+ /*
+ * It is possible to run into connection exceptions here because we are getting the connection early and might
+ * run into nodes that are not connected. In this case, on shard failure will move us to the next shard copy.
+ */
+ try (releasable) {
+ onShardFailure(shardIndex, shard, shardIt, e);
}
}
}
+ private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) {
+ assert assertExecuteOnStartThread();
+ SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias());
+ onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
+ }
+
/**
* Sends the request to the actual shard.
* @param shardIt the shards iterator
@@ -348,34 +342,6 @@ protected abstract void executePhaseOnShard(
SearchActionListener<Result> listener
);
- protected void fork(final Runnable runnable) {
- executor.execute(new AbstractRunnable() {
- @Override
- public void onFailure(Exception e) {
- logger.error(() -> "unexpected error during [" + task + "]", e);
- assert false : e;
- }
-
- @Override
- public void onRejection(Exception e) {
- // avoid leaks during node shutdown by executing on the current thread if the executor shuts down
- assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e;
- doRun();
- }
-
- @Override
- protected void doRun() {
- runnable.run();
- }
-
- @Override
- public boolean isForceExecution() {
- // we can not allow a stuffed queue to reject execution here
- return true;
- }
- });
- }
-
@Override
public final void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPhase) {
/* This is the main search phase transition where we move to the next phase. If all shards
@@ -794,61 +760,63 @@ protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator s
*/
protected abstract SearchPhase getNextPhase(SearchPhaseResults<Result> results, SearchPhaseContext context);
- private void executeNext(PendingExecutions pendingExecutions, Thread originalThread) {
- executeNext(pendingExecutions == null ? null : pendingExecutions.finishAndGetNext(), originalThread);
- }
-
- void executeNext(Runnable runnable, Thread originalThread) {
- if (runnable != null) {
- assert throttleConcurrentRequests;
- if (originalThread == Thread.currentThread()) {
- fork(runnable);
- } else {
- runnable.run();
- }
- }
- }
-
private static final class PendingExecutions {
- private final int permits;
- private int permitsTaken = 0;
- private final ArrayDeque<Runnable> queue = new ArrayDeque<>();
+ private final Semaphore semaphore;
+ private final LinkedTransferQueue<Consumer<Releasable>> queue = new LinkedTransferQueue<>();
PendingExecutions(int permits) {
assert permits > 0 : "not enough permits: " + permits;
- this.permits = permits;
+ semaphore = new Semaphore(permits);
}
- Runnable finishAndGetNext() {
- synchronized (this) {
- permitsTaken--;
- assert permitsTaken >= 0 : "illegal taken permits: " + permitsTaken;
+ void submit(Consumer<Releasable> task) {
+ if (semaphore.tryAcquire()) {
+ executeAndRelease(task);
+ } else {
+ queue.add(task);
+ if (semaphore.tryAcquire()) {
+ task = pollNextTaskOrReleasePermit();
+ if (task != null) {
+ executeAndRelease(task);
+ }
+ }
}
- return tryQueue(null);
+
}
- void tryRun(Runnable runnable) {
- Runnable r = tryQueue(runnable);
- if (r != null) {
- r.run();
+ private void executeAndRelease(Consumer task) {
+ while (task != null) {
+ final SubscribableListener<Void> onDone = new SubscribableListener<>();
+ task.accept(() -> onDone.onResponse(null));
+ if (onDone.isDone()) {
+ // keep going on the current thread, no need to fork
+ task = pollNextTaskOrReleasePermit();
+ } else {
+ onDone.addListener(new ActionListener<>() {
+ @Override
+ public void onResponse(Void unused) {
+ final Consumer<Releasable> nextTask = pollNextTaskOrReleasePermit();
+ if (nextTask != null) {
+ executeAndRelease(nextTask);
+ }
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ assert false : e;
+ }
+ });
+ return;
+ }
}
}
- private synchronized Runnable tryQueue(Runnable runnable) {
- Runnable toExecute = null;
- if (permitsTaken < permits) {
- permitsTaken++;
- toExecute = runnable;
- if (toExecute == null) { // only poll if we don't have anything to execute
- toExecute = queue.poll();
- }
- if (toExecute == null) {
- permitsTaken--;
- }
- } else if (runnable != null) {
- queue.add(runnable);
+ private Consumer<Releasable> pollNextTaskOrReleasePermit() {
+ var task = queue.poll();
+ if (task == null) {
+ semaphore.release();
}
- return toExecute;
+ return task;
}
}
}
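Reviewer note: the new PendingExecutions replaces the synchronized permit counter with a Semaphore plus an unbounded queue, draining on the submitting thread while tasks complete synchronously. A minimal standalone sketch of that permit-then-queue pattern, simplified to synchronous Runnable tasks (names are illustrative; the real class above drains Consumer<Releasable> tasks and continues via the SubscribableListener callback when a task completes asynchronously):

```java
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.Semaphore;

final class ThrottledRunner {
    private final Semaphore semaphore;
    private final LinkedTransferQueue<Runnable> queue = new LinkedTransferQueue<>();

    ThrottledRunner(int permits) {
        semaphore = new Semaphore(permits);
    }

    void submit(Runnable task) {
        if (semaphore.tryAcquire()) {
            runAndRelease(task);
        } else {
            queue.add(task);
            // Re-check: every permit holder may have drained the queue and
            // released its permit between the failed tryAcquire and the add
            // above; without this, the queued task could be stranded.
            if (semaphore.tryAcquire()) {
                Runnable next = pollOrRelease();
                if (next != null) {
                    runAndRelease(next);
                }
            }
        }
    }

    private void runAndRelease(Runnable task) {
        while (task != null) { // keep draining on the current thread
            task.run();        // sketch assumes tasks do not throw
            task = pollOrRelease();
        }
    }

    private Runnable pollOrRelease() {
        Runnable next = queue.poll();
        if (next == null) {
            semaphore.release(); // nothing queued: hand the permit back
        }
        return next;
    }
}
```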
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
index 212b99ca140d3..d32e102b2e18b 100644
--- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
+++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
@@ -28,8 +28,7 @@
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
-import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
+import org.elasticsearch.plugins.internal.XContentParserDecorator;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.UpdateCtxMap;
@@ -51,11 +50,9 @@ public class UpdateHelper {
private static final Logger logger = LogManager.getLogger(UpdateHelper.class);
private final ScriptService scriptService;
- private final DocumentParsingProvider documentParsingProvider;
- public UpdateHelper(ScriptService scriptService, DocumentParsingProvider documentParsingProvider) {
+ public UpdateHelper(ScriptService scriptService) {
this.scriptService = scriptService;
- this.documentParsingProvider = documentParsingProvider;
}
/**
@@ -183,14 +180,13 @@ static String calculateRouting(GetResult getResult, @Nullable IndexRequest updat
Result prepareUpdateIndexRequest(IndexShard indexShard, UpdateRequest request, GetResult getResult, boolean detectNoop) {
final IndexRequest currentRequest = request.doc();
final String routing = calculateRouting(getResult, currentRequest);
- final XContentMeteringParserDecorator meteringParserDecorator = documentParsingProvider.newMeteringParserDecorator(request);
final Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true);
final XContentType updateSourceContentType = sourceAndContent.v1();
final Map<String, Object> updatedSourceAsMap = sourceAndContent.v2();
final boolean noop = XContentHelper.update(
updatedSourceAsMap,
- currentRequest.sourceAsMap(meteringParserDecorator),
+ currentRequest.sourceAsMap(XContentParserDecorator.NOOP),
detectNoop
) == false;
@@ -228,9 +224,7 @@ Result prepareUpdateIndexRequest(IndexShard indexShard, UpdateRequest request, G
.setIfPrimaryTerm(getResult.getPrimaryTerm())
.waitForActiveShards(request.waitForActiveShards())
.timeout(request.timeout())
- .setRefreshPolicy(request.getRefreshPolicy())
- .setOriginatesFromUpdateByDoc(true);
- finalIndexRequest.setNormalisedBytesParsed(meteringParserDecorator.meteredDocumentSize().ingestedBytes());
+ .setRefreshPolicy(request.getRefreshPolicy());
return new Result(finalIndexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType);
}
}
@@ -272,8 +266,7 @@ Result prepareUpdateScriptRequest(IndexShard indexShard, UpdateRequest request,
.setIfPrimaryTerm(getResult.getPrimaryTerm())
.waitForActiveShards(request.waitForActiveShards())
.timeout(request.timeout())
- .setRefreshPolicy(request.getRefreshPolicy())
- .setOriginatesFromUpdateByScript(true);
+ .setRefreshPolicy(request.getRefreshPolicy());
return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType);
}
case DELETE -> {
diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java
index a0b6e665042d0..aec9c108d898d 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -34,6 +34,7 @@
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
+import java.math.BigInteger;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collections;
@@ -1485,27 +1486,68 @@ public static int parseInt(String s, int minValue, int maxValue, String key) {
}
public static int parseInt(String s, int minValue, int maxValue, String key, boolean isFiltered) {
- int value = Integer.parseInt(s);
+ int value;
+ try {
+ value = Integer.parseInt(s);
+ } catch (NumberFormatException e) {
+ // check if value is a number or garbage
+ try {
+ var bi = new BigInteger(s);
+ // it's a number, so check which bound it is outside
+ if (bi.compareTo(BigInteger.valueOf(minValue)) < 0) {
+ throw newNumericBoundsException(s, key, isFiltered, ">=", minValue);
+ } else {
+ throw newNumericBoundsException(s, key, isFiltered, "<=", maxValue);
+ }
+ } catch (NumberFormatException e2) {
+ throw e; // it's garbage, use the original exception
+ }
+ }
if (value < minValue) {
- String err = "Failed to parse value" + (isFiltered ? "" : " [" + s + "]") + " for setting [" + key + "] must be >= " + minValue;
- throw new IllegalArgumentException(err);
+ throw newNumericBoundsException(s, key, isFiltered, ">=", minValue);
}
if (value > maxValue) {
- String err = "Failed to parse value" + (isFiltered ? "" : " [" + s + "]") + " for setting [" + key + "] must be <= " + maxValue;
- throw new IllegalArgumentException(err);
+ throw newNumericBoundsException(s, key, isFiltered, "<=", maxValue);
}
return value;
}
static long parseLong(String s, long minValue, String key, boolean isFiltered) {
- long value = Long.parseLong(s);
+ long value;
+ try {
+ value = Long.parseLong(s);
+ } catch (NumberFormatException e) {
+ // check if value is a number or garbage
+ try {
+ var bi = new BigInteger(s);
+ // it's a number, so check which bound it is outside
+ if (bi.compareTo(BigInteger.valueOf(minValue)) < 0) {
+ throw newNumericBoundsException(s, key, isFiltered, ">=", minValue);
+ } else {
+ throw newNumericBoundsException(s, key, isFiltered, "<=", Long.MAX_VALUE);
+ }
+ } catch (NumberFormatException e2) {
+ throw e; // it's garbage, use the original exception
+ }
+ }
if (value < minValue) {
- String err = "Failed to parse value" + (isFiltered ? "" : " [" + s + "]") + " for setting [" + key + "] must be >= " + minValue;
- throw new IllegalArgumentException(err);
+ throw newNumericBoundsException(s, key, isFiltered, ">=", minValue);
}
return value;
}
+ private static IllegalArgumentException newNumericBoundsException(String s, String key, boolean isFiltered, String type, long bound) {
+ String err = "Failed to parse value"
+ + (isFiltered ? "" : " [" + s + "]")
+ + " for setting ["
+ + key
+ + "] must be "
+ + type
+ + " "
+ + bound;
+ throw new IllegalArgumentException(err);
+ }
+
public static Setting<Integer> intSetting(String key, int defaultValue, Property... properties) {
return intSetting(key, defaultValue, Integer.MIN_VALUE, properties);
}
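A self-contained sketch of the parsing strategy introduced above (class and method names are illustrative, not from this PR): the BigInteger fallback runs only to classify a failed parse, so non-numeric garbage rethrows the original NumberFormatException while an out-of-range number gets a precise bounds message:

```java
import java.math.BigInteger;

final class BoundsParseSketch {
    static int parseIntWithBounds(String s, int min, int max, String key) {
        int value;
        try {
            value = Integer.parseInt(s);
        } catch (NumberFormatException original) {
            BigInteger bi;
            try {
                bi = new BigInteger(s);
            } catch (NumberFormatException notANumber) {
                throw original; // garbage input: keep the original parse error
            }
            // a genuine number that overflowed int: report the violated bound
            if (bi.compareTo(BigInteger.valueOf(min)) < 0) {
                throw outOfBounds(s, key, ">=", min);
            }
            throw outOfBounds(s, key, "<=", max);
        }
        if (value < min) {
            throw outOfBounds(s, key, ">=", min);
        }
        if (value > max) {
            throw outOfBounds(s, key, "<=", max);
        }
        return value;
    }

    private static IllegalArgumentException outOfBounds(String s, String key, String op, long bound) {
        return new IllegalArgumentException(
            "Failed to parse value [" + s + "] for setting [" + key + "] must be " + op + " " + bound
        );
    }

    public static void main(String[] args) {
        // prints: Failed to parse value [2147483648] for setting [int.setting] must be <= 2147483647
        try {
            parseIntWithBounds("2147483648", Integer.MIN_VALUE, Integer.MAX_VALUE, "int.setting");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```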
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
index 1ed0a117ddd89..bde9b0fb8a4ab 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
@@ -80,7 +80,7 @@ public ParsedDocument parseDocument(SourceToParse source, MappingLookup mappingL
final RootDocumentParserContext context;
final XContentType xContentType = source.getXContentType();
- XContentMeteringParserDecorator meteringParserDecorator = source.getDocumentSizeObserver();
+ XContentMeteringParserDecorator meteringParserDecorator = source.getMeteringParserDecorator();
try (
XContentParser parser = meteringParserDecorator.decorate(
XContentHelper.createParser(parserConfiguration, source.source(), xContentType)
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java b/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java
index b1d882f04de54..f2ddf38fe4357 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java
@@ -15,6 +15,7 @@
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.xcontent.XContentType;
import java.util.Collections;
@@ -24,6 +25,7 @@
* The result of parsing a document.
*/
public class ParsedDocument {
+
private final Field version;
private final String id;
@@ -33,7 +35,7 @@ public class ParsedDocument {
private final List<LuceneDocument> documents;
- private final DocumentSize normalizedSize;
+ private final long normalizedSize;
private BytesReference source;
private XContentType xContentType;
@@ -61,7 +63,7 @@ public static ParsedDocument noopTombstone(String reason) {
new BytesArray("{}"),
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
}
@@ -86,7 +88,7 @@ public static ParsedDocument deleteTombstone(String id) {
new BytesArray("{}"),
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
}
@@ -99,7 +101,7 @@ public ParsedDocument(
BytesReference source,
XContentType xContentType,
Mapping dynamicMappingsUpdate,
- DocumentSize normalizedSize
+ long normalizedSize
) {
this.version = version;
this.seqID = seqID;
@@ -178,16 +180,7 @@ public String documentDescription() {
return "id";
}
- public DocumentSize getNormalizedSize() {
+ public long getNormalizedSize() {
return normalizedSize;
}
-
- /**
- * Normalized ingested and stored size of a document.
- * @param ingestedBytes ingest size of the document
- * @param storedBytes stored retained size of the document
- */
- public record DocumentSize(long ingestedBytes, long storedBytes) {
- public static final DocumentSize UNKNOWN = new DocumentSize(-1, -1);
- }
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java
index a8cb03c223833..879e0fe785df2 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java
@@ -91,7 +91,7 @@ public XContentType getXContentType() {
return this.xContentType;
}
- public XContentMeteringParserDecorator getDocumentSizeObserver() {
+ public XContentMeteringParserDecorator getMeteringParserDecorator() {
return meteringParserDecorator;
}
}
diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java
index 99ff44a3cd135..b5ac54b018e46 100644
--- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java
+++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java
@@ -68,8 +68,6 @@
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.node.ReportingService;
import org.elasticsearch.plugins.IngestPlugin;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
-import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.plugins.internal.XContentParserDecorator;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.Scheduler;
@@ -121,7 +119,6 @@ public class IngestService implements ClusterStateApplier, ReportingService<IngestInfo> {
private final MasterServiceTaskQueue<PipelineClusterStateUpdateTask> taskQueue;
private final ClusterService clusterService;
private final ScriptService scriptService;
- private final DocumentParsingProvider documentParsingProvider;
private final Map<String, Processor.Factory> processorFactories;
// Ideally this should be in IngestMetadata class, but we don't have the processor factories around there.
// We know of all the processor factories when a node with all its plugin have been initialized. Also some
@@ -204,12 +201,10 @@ public IngestService(
List<IngestPlugin> ingestPlugins,
Client client,
MatcherWatchdog matcherWatchdog,
- DocumentParsingProvider documentParsingProvider,
FailureStoreMetrics failureStoreMetrics
) {
this.clusterService = clusterService;
this.scriptService = scriptService;
- this.documentParsingProvider = documentParsingProvider;
this.processorFactories = processorFactories(
ingestPlugins,
new Processor.Parameters(
@@ -238,7 +233,6 @@ public IngestService(
IngestService(IngestService ingestService) {
this.clusterService = ingestService.clusterService;
this.scriptService = ingestService.scriptService;
- this.documentParsingProvider = ingestService.documentParsingProvider;
this.processorFactories = ingestService.processorFactories;
this.threadPool = ingestService.threadPool;
this.taskQueue = ingestService.taskQueue;
@@ -776,10 +770,7 @@ protected void doRun() {
}
final int slot = i;
final Releasable ref = refs.acquire();
- final XContentMeteringParserDecorator meteringParserDecorator = documentParsingProvider.newMeteringParserDecorator(
- indexRequest
- );
- final IngestDocument ingestDocument = newIngestDocument(indexRequest, meteringParserDecorator);
+ final IngestDocument ingestDocument = newIngestDocument(indexRequest);
final org.elasticsearch.script.Metadata originalDocumentMetadata = ingestDocument.getMetadata().clone();
// the document listener gives us three-way logic: a document can fail processing (1), or it can
// be successfully processed. a successfully processed document can be kept (2) or dropped (3).
@@ -820,7 +811,6 @@ public void onFailure(Exception e) {
);
executePipelines(pipelines, indexRequest, ingestDocument, resolveFailureStore, documentListener);
- indexRequest.setNormalisedBytesParsed(meteringParserDecorator.meteredDocumentSize().ingestedBytes());
assert actionRequest.index() != null;
i++;
@@ -1159,14 +1149,14 @@ static String getProcessorName(Processor processor) {
/**
* Builds a new ingest document from the passed-in index request.
*/
- private static IngestDocument newIngestDocument(final IndexRequest request, XContentParserDecorator parserDecorator) {
+ private static IngestDocument newIngestDocument(final IndexRequest request) {
return new IngestDocument(
request.index(),
request.id(),
request.version(),
request.routing(),
request.versionType(),
- request.sourceAsMap(parserDecorator)
+ request.sourceAsMap(XContentParserDecorator.NOOP)
);
}
diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
index 784e02059823b..0a88a202ac8d3 100644
--- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
+++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java
@@ -285,7 +285,7 @@ static NodeConstruction prepareConstruction(
ScriptService scriptService = constructor.createScriptService(settingsModule, threadPool, serviceProvider);
- constructor.createUpdateHelper(documentParsingProvider, scriptService);
+ constructor.createUpdateHelper(scriptService);
constructor.construct(
threadPool,
@@ -643,10 +643,10 @@ private DataStreamGlobalRetentionSettings createDataStreamServicesAndGlobalReten
return dataStreamGlobalRetentionSettings;
}
- private UpdateHelper createUpdateHelper(DocumentParsingProvider documentParsingProvider, ScriptService scriptService) {
- UpdateHelper updateHelper = new UpdateHelper(scriptService, documentParsingProvider);
+ private UpdateHelper createUpdateHelper(ScriptService scriptService) {
+ UpdateHelper updateHelper = new UpdateHelper(scriptService);
- modules.add(b -> { b.bind(UpdateHelper.class).toInstance(new UpdateHelper(scriptService, documentParsingProvider)); });
+ modules.add(b -> b.bind(UpdateHelper.class).toInstance(updateHelper));
return updateHelper;
}
@@ -701,7 +701,6 @@ private void construct(
pluginsService.filterPlugins(IngestPlugin.class).toList(),
client,
IngestService.createGrokThreadWatchdog(environment, threadPool),
- documentParsingProvider,
failureStoreMetrics
);
diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java
index e1613caf9deac..9df7fd4c3bd43 100644
--- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java
+++ b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java
@@ -9,7 +9,7 @@
package org.elasticsearch.plugins.internal;
-import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.index.mapper.MapperService;
/**
@@ -40,7 +40,7 @@ default DocumentSizeAccumulator createDocumentSizeAccumulator() {
/**
* @return an observer
*/
- default XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest<?> request) {
+ default XContentMeteringParserDecorator newMeteringParserDecorator(IndexRequest request) {
return XContentMeteringParserDecorator.NOOP;
}
}
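For plugin authors, a hedged sketch of the narrowed hook (hypothetical class name; the substantive change is only the parameter type, DocWriteRequest to IndexRequest):

```java
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;

// Hypothetical provider against the narrowed signature; a real
// implementation would return a metering decorator instead of NOOP.
final class ExampleParsingProvider implements DocumentParsingProvider {
    @Override
    public XContentMeteringParserDecorator newMeteringParserDecorator(IndexRequest request) {
        return XContentMeteringParserDecorator.NOOP;
    }
}
```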
diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java b/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java
index e3b4415edcc01..6ccdac19acb91 100644
--- a/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java
+++ b/server/src/main/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecorator.java
@@ -9,17 +9,17 @@
package org.elasticsearch.plugins.internal;
-import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize;
import org.elasticsearch.xcontent.XContentParser;
public interface XContentMeteringParserDecorator extends XContentParserDecorator {
+ long UNKNOWN_SIZE = -1;
/**
* a default noop implementation
*/
XContentMeteringParserDecorator NOOP = new XContentMeteringParserDecorator() {
@Override
- public DocumentSize meteredDocumentSize() {
- return DocumentSize.UNKNOWN;
+ public long meteredDocumentSize() {
+ return UNKNOWN_SIZE;
}
@Override
@@ -28,5 +28,5 @@ public XContentParser decorate(XContentParser xContentParser) {
}
};
- DocumentSize meteredDocumentSize();
+ long meteredDocumentSize();
}
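And a matching sketch of the long-based decorator contract (hypothetical class; a real implementation would wrap the parser in a counting delegate rather than returning it unchanged):

```java
import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.xcontent.XContentParser;

final class ExampleMeteringDecorator implements XContentMeteringParserDecorator {
    private long meteredBytes = UNKNOWN_SIZE; // would be accumulated by a wrapping parser

    @Override
    public XContentParser decorate(XContentParser parser) {
        return parser; // assumption: return a delegating, byte-counting parser here
    }

    @Override
    public long meteredDocumentSize() {
        return meteredBytes; // now a plain long, -1 (UNKNOWN_SIZE) when unmetered
    }
}
```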
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java
index 35ef892da59a2..b389e33993b9b 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java
@@ -49,11 +49,11 @@
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.plugins.internal.DocumentParsingProvider;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Names;
-import org.mockito.ArgumentCaptor;
import org.mockito.MockingDetails;
import org.mockito.Mockito;
import org.mockito.stubbing.Stubbing;
@@ -114,13 +114,18 @@ public void testExecuteBulkIndexRequest() throws Exception {
BulkItemRequest[] items = new BulkItemRequest[1];
boolean create = randomBoolean();
- DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(create);
+ IndexRequest writeRequest = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE).create(create);
BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
items[0] = primaryRequest;
BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);
randomlySetIgnoredPrimaryResponse(primaryRequest);
+ DocumentParsingProvider documentParsingProvider = mock();
+ XContentMeteringParserDecorator parserDecorator = mock();
+ when(documentParsingProvider.newMeteringParserDecorator(any())).thenReturn(parserDecorator);
+ when(parserDecorator.decorate(any())).then(i -> i.getArgument(0));
+
BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
TransportShardBulkAction.executeBulkItemRequest(
context,
@@ -129,7 +134,7 @@ public void testExecuteBulkIndexRequest() throws Exception {
new NoopMappingUpdatePerformer(),
(listener, mappingVersion) -> {},
ASSERTING_DONE_LISTENER,
- DocumentParsingProvider.EMPTY_INSTANCE
+ documentParsingProvider
);
assertFalse(context.hasMoreOperationsToExecute());
@@ -185,6 +190,8 @@ public void testExecuteBulkIndexRequest() throws Exception {
assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT));
assertThat(replicaRequest, equalTo(primaryRequest));
+ verify(documentParsingProvider).newMeteringParserDecorator(any());
+ verify(parserDecorator).decorate(any());
// Assert that the document count is still 1
assertDocCount(shard, 1);
@@ -600,9 +607,7 @@ public void testUpdateRequestWithConflictFailure() throws Exception {
.retryOnConflict(retries);
BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
- IndexRequest updateResponse = new IndexRequest("index").id("id")
- .source(Requests.INDEX_CONTENT_TYPE, "field", "value")
- .setNormalisedBytesParsed(0);// let's pretend this was modified by a script
+ IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");
DocumentParsingProvider documentParsingProvider = mock(DocumentParsingProvider.class);
Exception err = new VersionConflictEngineException(shardId, "id", "I'm conflicted <(;_;)>");
@@ -655,11 +660,7 @@ public void testUpdateRequestWithConflictFailure() throws Exception {
assertThat(failure.getCause(), equalTo(err));
assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT));
- // we have set 0 value on normalisedBytesParsed on the IndexRequest, like it happens with updates by script.
- ArgumentCaptor argument = ArgumentCaptor.forClass(IndexRequest.class);
- verify(documentParsingProvider, times(retries + 1)).newMeteringParserDecorator(argument.capture());
- IndexRequest value = argument.getValue();
- assertThat(value.getNormalisedBytesParsed(), equalTo(0L));
+ verify(documentParsingProvider, times(retries + 1)).newMeteringParserDecorator(any());
}
@SuppressWarnings("unchecked")
@@ -668,9 +669,7 @@ public void testUpdateRequestWithSuccess() throws Exception {
DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
- IndexRequest updateResponse = new IndexRequest("index").id("id")
- .source(Requests.INDEX_CONTENT_TYPE, "field", "value")
- .setNormalisedBytesParsed(100L);
+ IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");
DocumentParsingProvider documentParsingProvider = mock(DocumentParsingProvider.class);
boolean created = randomBoolean();
@@ -721,10 +720,7 @@ public void testUpdateRequestWithSuccess() throws Exception {
assertThat(response.status(), equalTo(created ? RestStatus.CREATED : RestStatus.OK));
assertThat(response.getSeqNo(), equalTo(13L));
- ArgumentCaptor argument = ArgumentCaptor.forClass(IndexRequest.class);
- verify(documentParsingProvider, times(1)).newMeteringParserDecorator(argument.capture());
- IndexRequest value = argument.getValue();
- assertThat(value.getNormalisedBytesParsed(), equalTo(100L));
+ verify(documentParsingProvider).newMeteringParserDecorator(updateResponse);
}
public void testUpdateWithDelete() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java
index 9729b653ae3d2..331f754d437a7 100644
--- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java
@@ -31,7 +31,6 @@
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.ProcessorInfo;
import org.elasticsearch.plugins.IngestPlugin;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.reservedstate.TransformState;
import org.elasticsearch.reservedstate.service.FileSettingsService;
import org.elasticsearch.reservedstate.service.ReservedClusterStateService;
@@ -94,7 +93,6 @@ public void setup() {
Collections.singletonList(DUMMY_PLUGIN),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
Map<String, Processor.Factory> factories = ingestService.getProcessorFactories();
diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
index d8960bd902ac5..0cc2dcf38e8ff 100644
--- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
@@ -26,7 +26,6 @@
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngine;
@@ -121,7 +120,7 @@ public void setUp() throws Exception {
final MockScriptEngine engine = new MockScriptEngine("mock", scripts, Collections.emptyMap());
Map<String, ScriptEngine> engines = Collections.singletonMap(engine.getType(), engine);
ScriptService scriptService = new ScriptService(baseSettings, engines, ScriptModule.CORE_CONTEXTS, () -> 1L);
- updateHelper = new UpdateHelper(scriptService, DocumentParsingProvider.EMPTY_INSTANCE);
+ updateHelper = new UpdateHelper(scriptService);
}
@SuppressWarnings("unchecked")
@@ -594,7 +593,7 @@ public void testNoopDetection() throws Exception {
try (var parser = createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"foo\"}}"))) {
request = new UpdateRequest("test", "1").fromXContent(parser);
}
- UpdateHelper updateHelper = new UpdateHelper(mock(ScriptService.class), DocumentParsingProvider.EMPTY_INSTANCE);
+ UpdateHelper updateHelper = new UpdateHelper(mock(ScriptService.class));
UpdateHelper.Result result = updateHelper.prepareUpdateIndexRequest(indexShard, request, getResult, true);
assertThat(result.action(), instanceOf(UpdateResponse.class));
diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java
index ba78ea5cf08a6..75f5045c5fbb6 100644
--- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java
@@ -1522,4 +1522,38 @@ public void testDeprecationPropertyValidation() {
() -> Setting.boolSetting("a.bool.setting", true, Property.DeprecatedWarning, Property.IndexSettingDeprecatedInV7AndRemovedInV8)
);
}
+
+ public void testIntSettingBounds() {
+ Setting<Integer> setting = Setting.intSetting("int.setting", 0, Integer.MIN_VALUE, Integer.MAX_VALUE);
+ var e = expectThrows(
+ IllegalArgumentException.class,
+ () -> setting.get(Settings.builder().put("int.setting", "2147483648").build())
+ );
+ assertThat(e.getMessage(), equalTo("Failed to parse value [2147483648] for setting [int.setting] must be <= 2147483647"));
+ var e2 = expectThrows(
+ IllegalArgumentException.class,
+ () -> setting.get(Settings.builder().put("int.setting", "-2147483649").build())
+ );
+ assertThat(e2.getMessage(), equalTo("Failed to parse value [-2147483649] for setting [int.setting] must be >= -2147483648"));
+ }
+
+ public void testLongSettingBounds() {
+ Setting<Long> setting = Setting.longSetting("long.setting", 0, Long.MIN_VALUE);
+ var e = expectThrows(
+ IllegalArgumentException.class,
+ () -> setting.get(Settings.builder().put("long.setting", "9223372036854775808").build())
+ );
+ assertThat(
+ e.getMessage(),
+ equalTo("Failed to parse value [9223372036854775808] for setting [long.setting] must be <= 9223372036854775807")
+ );
+ var e2 = expectThrows(
+ IllegalArgumentException.class,
+ () -> setting.get(Settings.builder().put("long.setting", "-9223372036854775809").build())
+ );
+ assertThat(
+ e2.getMessage(),
+ equalTo("Failed to parse value [-9223372036854775809] for setting [long.setting] must be >= -9223372036854775808")
+ );
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
index 753602e73a30a..c626be7983c46 100644
--- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
+++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
@@ -28,10 +28,10 @@
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineTestCase;
import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentParseException;
import org.elasticsearch.xcontent.XContentType;
@@ -217,7 +217,7 @@ public void testSlowLogMessageHasJsonFields() throws IOException {
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
@@ -246,7 +246,7 @@ public void testSlowLogMessageHasAdditionalFields() throws IOException {
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
@@ -276,7 +276,7 @@ public void testEmptyRoutingField() throws IOException {
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
Index index = new Index("foo", "123");
@@ -295,7 +295,7 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
@@ -327,7 +327,7 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
final XContentParseException e = expectThrows(
diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 21aefd893de70..bba1fa338559f 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -109,7 +109,6 @@
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
@@ -132,6 +131,7 @@
import org.elasticsearch.index.translog.TranslogOperationsUtils;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.recovery.RecoverySettings;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.index.IndexVersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
@@ -5522,7 +5522,7 @@ public void testSeqNoGenerator() throws IOException {
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
final Engine.Index index = new Engine.Index(
diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java
index ccf0bbebcc354..9e7f5fbbce1a3 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java
@@ -45,7 +45,6 @@
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.seqno.RetentionLeases;
@@ -54,6 +53,7 @@
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -567,7 +567,7 @@ private Engine.IndexResult index(String id, String testFieldValue) throws IOExce
source,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
Engine.Index index = new Engine.Index(uid, engine.config().getPrimaryTermSupplier().getAsLong(), doc);
return engine.index(index);
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index d0cabd609158b..97f49df41d099 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -56,7 +56,6 @@
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.ParsedDocument.DocumentSize;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
@@ -64,6 +63,7 @@
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog.Location;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.TransportVersionUtils;
@@ -3395,7 +3395,7 @@ public void testTranslogOpSerialization() throws Exception {
B_1,
XContentType.JSON,
null,
- DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
Engine.Index eIndex = new Engine.Index(
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
index d83fdbd5dd46b..b3ddc313eaf3a 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
@@ -54,10 +54,7 @@
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.VersionType;
-import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.plugins.IngestPlugin;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
-import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
@@ -68,7 +65,6 @@
import org.elasticsearch.test.MockLog;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xcontent.cbor.CborXContent;
import org.junit.Before;
@@ -157,7 +153,6 @@ public void testIngestPlugin() {
List.of(DUMMY_PLUGIN),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
Map<String, Processor.Factory> factories = ingestService.getProcessorFactories();
@@ -178,7 +173,6 @@ public void testIngestPluginDuplicate() {
List.of(DUMMY_PLUGIN, DUMMY_PLUGIN),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
)
);
@@ -196,7 +190,6 @@ public void testExecuteIndexPipelineDoesNotExist() {
List.of(DUMMY_PLUGIN),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
final IndexRequest indexRequest = new IndexRequest("_index").id("_id")
@@ -1194,66 +1187,6 @@ public void testExecuteBulkPipelineDoesNotExist() {
verify(completionHandler, times(1)).accept(Thread.currentThread(), null);
}
- public void testExecuteBulkRequestCallsDocumentSizeObserver() {
- /*
- * This test makes sure that for both insert and upsert requests, when we call executeBulkRequest DocumentSizeObserver is
- * called using a non-null index name.
- */
- AtomicInteger wrappedObserverWasUsed = new AtomicInteger(0);
- AtomicInteger parsedValueWasUsed = new AtomicInteger(0);
- DocumentParsingProvider documentParsingProvider = new DocumentParsingProvider() {
- @Override
- public XContentMeteringParserDecorator newMeteringParserDecorator(DocWriteRequest<?> request) {
- return new XContentMeteringParserDecorator() {
- @Override
- public ParsedDocument.DocumentSize meteredDocumentSize() {
- parsedValueWasUsed.incrementAndGet();
- return new ParsedDocument.DocumentSize(0, 0);
- }
-
- @Override
- public XContentParser decorate(XContentParser xContentParser) {
- wrappedObserverWasUsed.incrementAndGet();
- return xContentParser;
- }
- };
- }
- };
- IngestService ingestService = createWithProcessors(
- Map.of("mock", (factories, tag, description, config) -> mockCompoundProcessor()),
- documentParsingProvider
- );
-
- PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}");
- ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty
- ClusterState previousClusterState = clusterState;
- clusterState = executePut(putRequest, clusterState);
- ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
-
- BulkRequest bulkRequest = new BulkRequest();
- UpdateRequest updateRequest = new UpdateRequest("_index", "_id1").upsert("{}", "{}");
- updateRequest.upsertRequest().setPipeline("_id");
- bulkRequest.add(updateRequest);
- IndexRequest indexRequest = new IndexRequest("_index").id("_id1").source(Map.of()).setPipeline("_id1");
- bulkRequest.add(indexRequest);
- @SuppressWarnings("unchecked")
- BiConsumer<Integer, Exception> failureHandler = mock(BiConsumer.class);
- @SuppressWarnings("unchecked")
- final BiConsumer<Thread, Exception> completionHandler = mock(BiConsumer.class);
- ingestService.executeBulkRequest(
- bulkRequest.numberOfActions(),
- bulkRequest.requests(),
- indexReq -> {},
- (s) -> false,
- (slot, targetIndex, e) -> fail("Should not be redirecting failures"),
- failureHandler,
- completionHandler,
- EsExecutors.DIRECT_EXECUTOR_SERVICE
- );
- assertThat(wrappedObserverWasUsed.get(), equalTo(2));
- assertThat(parsedValueWasUsed.get(), equalTo(2));
- }
-
public void testExecuteSuccess() {
IngestService ingestService = createWithProcessors(
Map.of("mock", (factories, tag, description, config) -> mockCompoundProcessor())
@@ -2271,7 +2204,6 @@ public Map getProcessors(Processor.Parameters paramet
List.of(testPlugin),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
ingestService.addIngestClusterStateListener(ingestClusterStateListener);
@@ -2611,7 +2543,6 @@ private void testUpdatingPipeline(String pipelineString) throws Exception {
List.of(DUMMY_PLUGIN),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, clusterState));
@@ -2921,13 +2852,6 @@ private static IngestService createWithProcessors() {
}
private static IngestService createWithProcessors(Map<String, Processor.Factory> processors) {
- return createWithProcessors(processors, DocumentParsingProvider.EMPTY_INSTANCE);
- }
-
- private static IngestService createWithProcessors(
- Map<String, Processor.Factory> processors,
- DocumentParsingProvider documentParsingProvider
- ) {
Client client = mock(Client.class);
ThreadPool threadPool = mock(ThreadPool.class);
when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
@@ -2946,7 +2870,6 @@ public Map getProcessors(final Processor.Parameters p
}),
client,
null,
- documentParsingProvider,
FailureStoreMetrics.NOOP
);
if (randomBoolean()) {
diff --git a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java
index 94b3607bd7608..e8115e7266176 100644
--- a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java
@@ -16,7 +16,6 @@
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.plugins.IngestPlugin;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.XContentType;
@@ -132,7 +131,6 @@ public Map getProcessors(final Processor.Parameters p
List.of(ingestPlugin),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
}
diff --git a/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java
index 8216d092bd683..caf4494986f6d 100644
--- a/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java
+++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java
@@ -428,7 +428,7 @@ private Matcher matchDouble(double value) {
// Most data (notably geo data) has values within bounds, and an absolute delta makes more sense.
double delta = (value > 1e28 || value < -1e28) ? Math.abs(value / 1e6)
: (value > 1e20 || value < -1e20) ? Math.abs(value / 1e10)
- : (value > 1e9 || value < -1e9) ? Math.abs(value / 1e15)
+ : (value > 1e8 || value < -1e8) ? Math.abs(value / 1e15)
: DELTA;
return closeTo(value, delta);
}
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
index c46d98fe1cd8b..e0363d84ea4d2 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
@@ -2405,7 +2405,6 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() {
Collections.emptyList(),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
),
mockFeatureService,
@@ -2425,7 +2424,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() {
threadPool,
shardStateAction,
mappingUpdatedAction,
- new UpdateHelper(scriptService, DocumentParsingProvider.EMPTY_INSTANCE),
+ new UpdateHelper(scriptService),
actionFilters,
indexingMemoryLimits,
EmptySystemIndices.INSTANCE,
diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
index 4713adf6cf01d..87c566d543d0f 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
@@ -100,6 +100,7 @@
import org.elasticsearch.index.translog.TranslogDeletionPolicy;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
+import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -428,7 +429,7 @@ protected static ParsedDocument testParsedDocument(
source,
XContentType.JSON,
mappingUpdate,
- ParsedDocument.DocumentSize.UNKNOWN
+ XContentMeteringParserDecorator.UNKNOWN_SIZE
);
}
diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 5f64d123c1bed..d6709b00b4dbb 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -81,6 +81,7 @@
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.DisabledQueryCache;
import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy;
+import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@@ -749,6 +750,7 @@ protected void tes
new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false),
new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true)
);
+ config.setParentField(Engine.ROOT_DOC_FIELD_NAME);
config.setIndexSort(sort);
}
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config);
diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java
index 753ce8283afca..3c7a18de536bc 100644
--- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java
+++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java
@@ -42,6 +42,7 @@
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.startsWith;
public class TimeSeriesRateAggregatorTests extends AggregatorTestCase {
@@ -155,14 +156,14 @@ public void testNestedWithinAutoDateHistogram() throws IOException {
AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"))
.withSplitLeavesIntoSeperateAggregators(false);
- expectThrows(IllegalArgumentException.class, () -> testCase(iw -> {
- for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) {
- iw.addDocument(document);
- }
- for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) {
- iw.addDocument(document);
- }
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testCase(iw -> {
+ iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14));
+ iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40));
}, verifier, aggTestConfig));
+ assertThat(
+ e.getMessage(),
+ startsWith("Wrapping a time-series rate aggregation within a DeferableBucketAggregator is not supported.")
+ );
}
private List<Document> docs(long startTimestamp, String dim, long... values) throws IOException {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
index c18f55a651408..b86935dcd03da 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
@@ -56,6 +56,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@@ -1879,6 +1880,11 @@ public void testMvAppendValidation() {
Supplier<Integer> supplier = () -> randomInt(fields.length - 1);
int first = supplier.get();
int second = randomValueOtherThan(first, supplier);
+ Function<String, String> noText = (type) -> type.equals("text") ? "keyword" : type;
+ assumeTrue(
+ "Ignore tests with TEXT and KEYWORD combinations because they are now valid",
+ noText.apply(fields[first][0]).equals(noText.apply(fields[second][0])) == false
+ );
String signature = "mv_append(" + fields[first][0] + ", " + fields[second][0] + ")";
verifyUnsupported(
@@ -1886,7 +1892,7 @@ public void testMvAppendValidation() {
"second argument of ["
+ signature
+ "] must be ["
- + fields[first][1]
+ + noText.apply(fields[first][1])
+ "], found value ["
+ fields[second][0]
+ "] type ["
diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
index 37de2caadb475..53c82219e2f12 100644
--- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
+++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
@@ -307,8 +307,7 @@ public void testSupportedStream() throws Exception {
assertEquals(modelId, singleModel.get("inference_id"));
assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type"));
- var input = IntStream.range(1, randomInt(10)).mapToObj(i -> randomAlphaOfLength(10)).toList();
-
+ var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphaOfLength(10)).toList();
try {
var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input);
diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4 b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4
index cffa2db9f959a..dbf7c1979796a 100644
--- a/x-pack/plugin/kql/src/main/antlr/KqlBase.g4
+++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.g4
@@ -26,70 +26,68 @@ topLevelQuery
;
query
- : query (AND | OR) query #booleanQuery
- | NOT subQuery=simpleQuery #notQuery
- | simpleQuery #defaultQuery
+ : query operator=(AND | OR) query #booleanQuery
+ | NOT subQuery=simpleQuery #notQuery
+ | simpleQuery #defaultQuery
;
simpleQuery
: nestedQuery
- | expression
| parenthesizedQuery
- ;
-
-expression
- : fieldTermQuery
- | fieldRangeQuery
+ | matchAllQuery
+ | existsQuery
+ | rangeQuery
+ | fieldQuery
+ | fieldLessQuery
;
nestedQuery
: fieldName COLON LEFT_CURLY_BRACKET query RIGHT_CURLY_BRACKET
;
-parenthesizedQuery:
- LEFT_PARENTHESIS query RIGHT_PARENTHESIS;
-
-fieldRangeQuery
- : fieldName operator=OP_COMPARE rangeQueryValue
+matchAllQuery
+ : (WILDCARD COLON)? WILDCARD
;
-fieldTermQuery
- : (fieldName COLON)? termQueryValue
+parenthesizedQuery
+ : LEFT_PARENTHESIS query RIGHT_PARENTHESIS
;
-fieldName
- : wildcardExpression
- | unquotedLiteralExpression
- | quotedStringExpression
+rangeQuery
+ : fieldName operator=(OP_LESS|OP_LESS_EQ|OP_MORE|OP_MORE_EQ) rangeQueryValue
;
rangeQueryValue
- : unquotedLiteralExpression
- | quotedStringExpression
- ;
-
-termQueryValue
- : wildcardExpression
- | quotedStringExpression
- | termValue=unquotedLiteralExpression
- | groupingTermExpression;
+ : (UNQUOTED_LITERAL|WILDCARD)+
+ | QUOTED_STRING
+ ;
-groupingTermExpression
- : LEFT_PARENTHESIS unquotedLiteralExpression RIGHT_PARENTHESIS
+existsQuery
+ : fieldName COLON WILDCARD
;
-unquotedLiteralExpression
- : UNQUOTED_LITERAL+
+fieldQuery
+ : fieldName COLON fieldQueryValue
+ | fieldName COLON LEFT_PARENTHESIS fieldQueryValue RIGHT_PARENTHESIS
;
-quotedStringExpression
- : QUOTED_STRING
+fieldLessQuery
+ : fieldQueryValue
+ | LEFT_PARENTHESIS fieldQueryValue RIGHT_PARENTHESIS
;
-wildcardExpression
- : WILDCARD
-;
+fieldQueryValue
+ : (AND|OR)? (UNQUOTED_LITERAL | WILDCARD )+
+ | (UNQUOTED_LITERAL | WILDCARD )+ (AND|OR)?
+ | (NOT|AND|OR)
+ | QUOTED_STRING
+ ;
+fieldName
+ : value=UNQUOTED_LITERAL+
+ | value=QUOTED_STRING
+ | value=WILDCARD
+ ;
DEFAULT_SKIP: WHITESPACE -> skip;
@@ -98,31 +96,34 @@ OR: 'or';
NOT: 'not';
COLON: ':';
-OP_COMPARE: OP_LESS | OP_MORE | OP_LESS_EQ | OP_MORE_EQ;
+OP_LESS: '<';
+OP_LESS_EQ: '<=';
+OP_MORE: '>';
+OP_MORE_EQ: '>=';
LEFT_PARENTHESIS: '(';
RIGHT_PARENTHESIS: ')';
LEFT_CURLY_BRACKET: '{';
RIGHT_CURLY_BRACKET: '}';
-UNQUOTED_LITERAL: WILDCARD* UNQUOTED_LITERAL_CHAR+ WILDCARD*;
+UNQUOTED_LITERAL: UNQUOTED_LITERAL_CHAR+;
QUOTED_STRING: '"'QUOTED_CHAR*'"';
-WILDCARD: WILDCARD_CHAR+;
+WILDCARD: WILDCARD_CHAR;
fragment WILDCARD_CHAR: '*';
-fragment OP_LESS: '<';
-fragment OP_LESS_EQ: '<=';
-fragment OP_MORE: '>';
-fragment OP_MORE_EQ: '>=';
fragment UNQUOTED_LITERAL_CHAR
+ : WILDCARD_CHAR* UNQUOTED_LITERAL_BASE_CHAR WILDCARD_CHAR*
+ | WILDCARD_CHAR WILDCARD_CHAR+
+ ;
+
+fragment UNQUOTED_LITERAL_BASE_CHAR
: ESCAPED_WHITESPACE
| ESCAPED_SPECIAL_CHAR
| ESCAPE_UNICODE_SEQUENCE
| '\\' (AND | OR | NOT)
- | WILDCARD_CHAR UNQUOTED_LITERAL_CHAR
| NON_SPECIAL_CHAR
;
@@ -135,7 +136,7 @@ fragment QUOTED_CHAR
fragment WHITESPACE: [ \t\n\r\u3000];
fragment ESCAPED_WHITESPACE: '\\r' | '\\t' | '\\n';
-fragment NON_SPECIAL_CHAR: ~[ \\():<>"*{}];
+fragment NON_SPECIAL_CHAR: ~[ \n\r\t\u3000\\():<>"*{}];
fragment ESCAPED_SPECIAL_CHAR: '\\'[ \\():<>"*{}];
fragment ESCAPED_QUOTE: '\\"';
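For orientation, a hedged reading of the reshaped rules in terms of queries they are intended to accept (illustrative examples, not test cases from this PR):

```
matchAllQuery    *         or   *:*
existsQuery      response:*
rangeQuery       bytes >= 1024
fieldQuery       status:active   or   status:(active)
fieldLessQuery   active          (a bare term, searched against default fields)
nestedQuery      user:{ name:alice }
```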
diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens b/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens
index 268ae0613b9f0..f26b6b9c3da55 100644
--- a/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens
+++ b/x-pack/plugin/kql/src/main/antlr/KqlBase.tokens
@@ -3,19 +3,26 @@ AND=2
OR=3
NOT=4
COLON=5
-OP_COMPARE=6
-LEFT_PARENTHESIS=7
-RIGHT_PARENTHESIS=8
-LEFT_CURLY_BRACKET=9
-RIGHT_CURLY_BRACKET=10
-UNQUOTED_LITERAL=11
-QUOTED_STRING=12
-WILDCARD=13
+OP_LESS=6
+OP_LESS_EQ=7
+OP_MORE=8
+OP_MORE_EQ=9
+LEFT_PARENTHESIS=10
+RIGHT_PARENTHESIS=11
+LEFT_CURLY_BRACKET=12
+RIGHT_CURLY_BRACKET=13
+UNQUOTED_LITERAL=14
+QUOTED_STRING=15
+WILDCARD=16
'and'=2
'or'=3
'not'=4
':'=5
-'('=7
-')'=8
-'{'=9
-'}'=10
+'<'=6
+'<='=7
+'>'=8
+'>='=9
+'('=10
+')'=11
+'{'=12
+'}'=13
diff --git a/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens b/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens
index 268ae0613b9f0..f26b6b9c3da55 100644
--- a/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens
+++ b/x-pack/plugin/kql/src/main/antlr/KqlBaseLexer.tokens
@@ -3,19 +3,26 @@ AND=2
OR=3
NOT=4
COLON=5
-OP_COMPARE=6
-LEFT_PARENTHESIS=7
-RIGHT_PARENTHESIS=8
-LEFT_CURLY_BRACKET=9
-RIGHT_CURLY_BRACKET=10
-UNQUOTED_LITERAL=11
-QUOTED_STRING=12
-WILDCARD=13
+OP_LESS=6
+OP_LESS_EQ=7
+OP_MORE=8
+OP_MORE_EQ=9
+LEFT_PARENTHESIS=10
+RIGHT_PARENTHESIS=11
+LEFT_CURLY_BRACKET=12
+RIGHT_CURLY_BRACKET=13
+UNQUOTED_LITERAL=14
+QUOTED_STRING=15
+WILDCARD=16
'and'=2
'or'=3
'not'=4
':'=5
-'('=7
-')'=8
-'{'=9
-'}'=10
+'<'=6
+'<='=7
+'>'=8
+'>='=9
+'('=10
+')'=11
+'{'=12
+'}'=13
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp
index 1954195b52363..111cac6d641b9 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBase.interp
@@ -5,7 +5,10 @@ null
'or'
'not'
':'
-null
+'<'
+'<='
+'>'
+'>='
'('
')'
'{'
@@ -21,7 +24,10 @@ AND
OR
NOT
COLON
-OP_COMPARE
+OP_LESS
+OP_LESS_EQ
+OP_MORE
+OP_MORE_EQ
LEFT_PARENTHESIS
RIGHT_PARENTHESIS
LEFT_CURLY_BRACKET
@@ -34,19 +40,17 @@ rule names:
topLevelQuery
query
simpleQuery
-expression
nestedQuery
+matchAllQuery
parenthesizedQuery
-fieldRangeQuery
-fieldTermQuery
-fieldName
+rangeQuery
rangeQueryValue
-termQueryValue
-groupingTermExpression
-unquotedLiteralExpression
-quotedStringExpression
-wildcardExpression
+existsQuery
+fieldQuery
+fieldLessQuery
+fieldQueryValue
+fieldName
atn:
-[4, 1, 13, 108, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 1, 0, 3, 0, 32, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 40, 8, 1, 1, 1, 1, 1, 1, 1, 5, 1, 45, 8, 1, 10, 1, 12, 1, 48, 9, 1, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 3, 3, 57, 8, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 3, 7, 76, 8, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 3, 8, 83, 8, 8, 1, 9, 1, 9, 3, 9, 87, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 93, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 4, 12, 100, 8, 12, 11, 12, 12, 12, 101, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 0, 1, 2, 15, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 0, 1, 1, 0, 2, 3, 106, 0, 31, 1, 0, 0, 0, 2, 39, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 56, 1, 0, 0, 0, 8, 58, 1, 0, 0, 0, 10, 64, 1, 0, 0, 0, 12, 68, 1, 0, 0, 0, 14, 75, 1, 0, 0, 0, 16, 82, 1, 0, 0, 0, 18, 86, 1, 0, 0, 0, 20, 92, 1, 0, 0, 0, 22, 94, 1, 0, 0, 0, 24, 99, 1, 0, 0, 0, 26, 103, 1, 0, 0, 0, 28, 105, 1, 0, 0, 0, 30, 32, 3, 2, 1, 0, 31, 30, 1, 0, 0, 0, 31, 32, 1, 0, 0, 0, 32, 33, 1, 0, 0, 0, 33, 34, 5, 0, 0, 1, 34, 1, 1, 0, 0, 0, 35, 36, 6, 1, -1, 0, 36, 37, 5, 4, 0, 0, 37, 40, 3, 4, 2, 0, 38, 40, 3, 4, 2, 0, 39, 35, 1, 0, 0, 0, 39, 38, 1, 0, 0, 0, 40, 46, 1, 0, 0, 0, 41, 42, 10, 3, 0, 0, 42, 43, 7, 0, 0, 0, 43, 45, 3, 2, 1, 4, 44, 41, 1, 0, 0, 0, 45, 48, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 46, 47, 1, 0, 0, 0, 47, 3, 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 49, 53, 3, 8, 4, 0, 50, 53, 3, 6, 3, 0, 51, 53, 3, 10, 5, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 57, 3, 14, 7, 0, 55, 57, 3, 12, 6, 0, 56, 54, 1, 0, 0, 0, 56, 55, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 59, 3, 16, 8, 0, 59, 60, 5, 5, 0, 0, 60, 61, 5, 9, 0, 0, 61, 62, 3, 2, 1, 0, 62, 63, 5, 10, 0, 0, 63, 9, 1, 0, 0, 0, 64, 65, 5, 7, 0, 0, 65, 66, 3, 2, 1, 0, 66, 67, 5, 8, 0, 0, 67, 11, 1, 0, 0, 0, 68, 69, 3, 16, 8, 0, 69, 70, 5, 6, 0, 0, 70, 71, 3, 18, 9, 0, 71, 13, 1, 0, 0, 0, 72, 73, 3, 16, 8, 0, 73, 74, 5, 5, 0, 0, 74, 76, 1, 0, 0, 0, 75, 72, 1, 0, 0, 0, 75, 76, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, 78, 3, 20, 10, 0, 78, 15, 1, 0, 0, 0, 79, 83, 3, 28, 14, 0, 80, 83, 3, 24, 12, 0, 81, 83, 3, 26, 13, 0, 82, 79, 1, 0, 0, 0, 82, 80, 1, 0, 0, 0, 82, 81, 1, 0, 0, 0, 83, 17, 1, 0, 0, 0, 84, 87, 3, 24, 12, 0, 85, 87, 3, 26, 13, 0, 86, 84, 1, 0, 0, 0, 86, 85, 1, 0, 0, 0, 87, 19, 1, 0, 0, 0, 88, 93, 3, 28, 14, 0, 89, 93, 3, 26, 13, 0, 90, 93, 3, 24, 12, 0, 91, 93, 3, 22, 11, 0, 92, 88, 1, 0, 0, 0, 92, 89, 1, 0, 0, 0, 92, 90, 1, 0, 0, 0, 92, 91, 1, 0, 0, 0, 93, 21, 1, 0, 0, 0, 94, 95, 5, 7, 0, 0, 95, 96, 3, 24, 12, 0, 96, 97, 5, 8, 0, 0, 97, 23, 1, 0, 0, 0, 98, 100, 5, 11, 0, 0, 99, 98, 1, 0, 0, 0, 100, 101, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, 101, 102, 1, 0, 0, 0, 102, 25, 1, 0, 0, 0, 103, 104, 5, 12, 0, 0, 104, 27, 1, 0, 0, 0, 105, 106, 5, 13, 0, 0, 106, 29, 1, 0, 0, 0, 10, 31, 39, 46, 52, 56, 75, 82, 86, 92, 101]
\ No newline at end of file
+[4, 1, 16, 135, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 1, 0, 3, 0, 28, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 36, 8, 1, 1, 1, 1, 1, 1, 1, 5, 1, 41, 8, 1, 10, 1, 12, 1, 44, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 53, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 3, 4, 63, 8, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 4, 7, 76, 8, 7, 11, 7, 12, 7, 77, 1, 7, 3, 7, 81, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 97, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 104, 8, 10, 1, 11, 3, 11, 107, 8, 11, 1, 11, 4, 11, 110, 8, 11, 11, 11, 12, 11, 111, 1, 11, 4, 11, 115, 8, 11, 11, 11, 12, 11, 116, 1, 11, 3, 11, 120, 8, 11, 1, 11, 1, 11, 3, 11, 124, 8, 11, 1, 12, 4, 12, 127, 8, 12, 11, 12, 12, 12, 128, 1, 12, 1, 12, 3, 12, 133, 8, 12, 1, 12, 0, 1, 2, 13, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 0, 4, 1, 0, 2, 3, 1, 0, 6, 9, 2, 0, 14, 14, 16, 16, 1, 0, 2, 4, 145, 0, 27, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 52, 1, 0, 0, 0, 6, 54, 1, 0, 0, 0, 8, 62, 1, 0, 0, 0, 10, 66, 1, 0, 0, 0, 12, 70, 1, 0, 0, 0, 14, 80, 1, 0, 0, 0, 16, 82, 1, 0, 0, 0, 18, 96, 1, 0, 0, 0, 20, 103, 1, 0, 0, 0, 22, 123, 1, 0, 0, 0, 24, 132, 1, 0, 0, 0, 26, 28, 3, 2, 1, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 29, 1, 0, 0, 0, 29, 30, 5, 0, 0, 1, 30, 1, 1, 0, 0, 0, 31, 32, 6, 1, -1, 0, 32, 33, 5, 4, 0, 0, 33, 36, 3, 4, 2, 0, 34, 36, 3, 4, 2, 0, 35, 31, 1, 0, 0, 0, 35, 34, 1, 0, 0, 0, 36, 42, 1, 0, 0, 0, 37, 38, 10, 3, 0, 0, 38, 39, 7, 0, 0, 0, 39, 41, 3, 2, 1, 3, 40, 37, 1, 0, 0, 0, 41, 44, 1, 0, 0, 0, 42, 40, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 3, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 45, 53, 3, 6, 3, 0, 46, 53, 3, 10, 5, 0, 47, 53, 3, 8, 4, 0, 48, 53, 3, 16, 8, 0, 49, 53, 3, 12, 6, 0, 50, 53, 3, 18, 9, 0, 51, 53, 3, 20, 10, 0, 52, 45, 1, 0, 0, 0, 52, 46, 1, 0, 0, 0, 52, 47, 1, 0, 0, 0, 52, 48, 1, 0, 0, 0, 52, 49, 1, 0, 0, 0, 52, 50, 1, 0, 0, 0, 52, 51, 1, 0, 0, 0, 53, 5, 1, 0, 0, 0, 54, 55, 3, 24, 12, 0, 55, 56, 5, 5, 0, 0, 56, 57, 5, 12, 0, 0, 57, 58, 3, 2, 1, 0, 58, 59, 5, 13, 0, 0, 59, 7, 1, 0, 0, 0, 60, 61, 5, 16, 0, 0, 61, 63, 5, 5, 0, 0, 62, 60, 1, 0, 0, 0, 62, 63, 1, 0, 0, 0, 63, 64, 1, 0, 0, 0, 64, 65, 5, 16, 0, 0, 65, 9, 1, 0, 0, 0, 66, 67, 5, 10, 0, 0, 67, 68, 3, 2, 1, 0, 68, 69, 5, 11, 0, 0, 69, 11, 1, 0, 0, 0, 70, 71, 3, 24, 12, 0, 71, 72, 7, 1, 0, 0, 72, 73, 3, 14, 7, 0, 73, 13, 1, 0, 0, 0, 74, 76, 7, 2, 0, 0, 75, 74, 1, 0, 0, 0, 76, 77, 1, 0, 0, 0, 77, 75, 1, 0, 0, 0, 77, 78, 1, 0, 0, 0, 78, 81, 1, 0, 0, 0, 79, 81, 5, 15, 0, 0, 80, 75, 1, 0, 0, 0, 80, 79, 1, 0, 0, 0, 81, 15, 1, 0, 0, 0, 82, 83, 3, 24, 12, 0, 83, 84, 5, 5, 0, 0, 84, 85, 5, 16, 0, 0, 85, 17, 1, 0, 0, 0, 86, 87, 3, 24, 12, 0, 87, 88, 5, 5, 0, 0, 88, 89, 3, 22, 11, 0, 89, 97, 1, 0, 0, 0, 90, 91, 3, 24, 12, 0, 91, 92, 5, 5, 0, 0, 92, 93, 5, 10, 0, 0, 93, 94, 3, 22, 11, 0, 94, 95, 5, 11, 0, 0, 95, 97, 1, 0, 0, 0, 96, 86, 1, 0, 0, 0, 96, 90, 1, 0, 0, 0, 97, 19, 1, 0, 0, 0, 98, 104, 3, 22, 11, 0, 99, 100, 5, 10, 0, 0, 100, 101, 3, 22, 11, 0, 101, 102, 5, 11, 0, 0, 102, 104, 1, 0, 0, 0, 103, 98, 1, 0, 0, 0, 103, 99, 1, 0, 0, 0, 104, 21, 1, 0, 0, 0, 105, 107, 7, 0, 0, 0, 106, 105, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 109, 1, 0, 0, 0, 108, 110, 7, 2, 0, 0, 109, 108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112, 1, 0, 0, 0, 112, 124, 1, 0, 0, 0, 113, 115, 7, 2, 0, 0, 114, 113, 1, 0, 0, 0, 115, 116, 1, 0, 0, 0, 
116, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 119, 1, 0, 0, 0, 118, 120, 7, 0, 0, 0, 119, 118, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 124, 1, 0, 0, 0, 121, 124, 7, 3, 0, 0, 122, 124, 5, 15, 0, 0, 123, 106, 1, 0, 0, 0, 123, 114, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 123, 122, 1, 0, 0, 0, 124, 23, 1, 0, 0, 0, 125, 127, 5, 14, 0, 0, 126, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 133, 1, 0, 0, 0, 130, 133, 5, 15, 0, 0, 131, 133, 5, 16, 0, 0, 132, 126, 1, 0, 0, 0, 132, 130, 1, 0, 0, 0, 132, 131, 1, 0, 0, 0, 133, 25, 1, 0, 0, 0, 16, 27, 35, 42, 52, 62, 77, 80, 96, 103, 106, 111, 116, 119, 123, 128, 132]
\ No newline at end of file
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java
index 1b4282b5dbbea..426af7f7115b9 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseListener.java
@@ -80,18 +80,6 @@ class KqlBaseBaseListener implements KqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterExpression(KqlBaseParser.ExpressionContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitExpression(KqlBaseParser.ExpressionContext ctx) { }
/**
* {@inheritDoc}
*
@@ -109,49 +97,37 @@ class KqlBaseBaseListener implements KqlBaseListener {
*
* The default implementation does nothing.
*/
- @Override public void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { }
- /**
- * {@inheritDoc}
- *
- * The default implementation does nothing.
- */
- @Override public void enterFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { }
+ @Override public void enterMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { }
+ @Override public void exitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { }
+ @Override public void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { }
+ @Override public void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterFieldName(KqlBaseParser.FieldNameContext ctx) { }
+ @Override public void enterRangeQuery(KqlBaseParser.RangeQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitFieldName(KqlBaseParser.FieldNameContext ctx) { }
+ @Override public void exitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { }
/**
* {@inheritDoc}
*
@@ -169,61 +145,61 @@ class KqlBaseBaseListener implements KqlBaseListener {
*
* The default implementation does nothing.
*/
- @Override public void enterTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { }
+ @Override public void enterExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { }
+ @Override public void exitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { }
+ @Override public void enterFieldQuery(KqlBaseParser.FieldQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { }
+ @Override public void exitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { }
+ @Override public void enterFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { }
+ @Override public void exitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { }
+ @Override public void enterFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { }
+ @Override public void exitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void enterWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { }
+ @Override public void enterFieldName(KqlBaseParser.FieldNameContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { }
+ @Override public void exitFieldName(KqlBaseParser.FieldNameContext ctx) { }
/**
* {@inheritDoc}
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java
index 09cd668804154..cf1f2b3972823 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseBaseVisitor.java
@@ -55,13 +55,6 @@ class KqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements KqlBa
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitExpression(KqlBaseParser.ExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -75,28 +68,21 @@ class KqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements KqlBa
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { return visitChildren(ctx); }
- /**
- * {@inheritDoc}
- *
- * The default implementation returns the result of calling
- * {@link #visitChildren} on {@code ctx}.
- */
- @Override public T visitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx) { return visitChildren(ctx); }
+ @Override public T visitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx) { return visitChildren(ctx); }
+ @Override public T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitFieldName(KqlBaseParser.FieldNameContext ctx) { return visitChildren(ctx); }
+ @Override public T visitRangeQuery(KqlBaseParser.RangeQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -110,33 +96,33 @@ class KqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements KqlBa
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx) { return visitChildren(ctx); }
+ @Override public T visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx) { return visitChildren(ctx); }
+ @Override public T visitFieldQuery(KqlBaseParser.FieldQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx) { return visitChildren(ctx); }
+ @Override public T visitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx) { return visitChildren(ctx); }
+ @Override public T visitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx) { return visitChildren(ctx); }
+ @Override public T visitFieldName(KqlBaseParser.FieldNameContext ctx) { return visitChildren(ctx); }
}
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp
index d178df5fcbc88..f9afe07af3b40 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.interp
@@ -5,7 +5,10 @@ null
'or'
'not'
':'
-null
+'<'
+'<='
+'>'
+'>='
'('
')'
'{'
@@ -21,7 +24,10 @@ AND
OR
NOT
COLON
-OP_COMPARE
+OP_LESS
+OP_LESS_EQ
+OP_MORE
+OP_MORE_EQ
LEFT_PARENTHESIS
RIGHT_PARENTHESIS
LEFT_CURLY_BRACKET
@@ -36,7 +42,10 @@ AND
OR
NOT
COLON
-OP_COMPARE
+OP_LESS
+OP_LESS_EQ
+OP_MORE
+OP_MORE_EQ
LEFT_PARENTHESIS
RIGHT_PARENTHESIS
LEFT_CURLY_BRACKET
@@ -45,11 +54,8 @@ UNQUOTED_LITERAL
QUOTED_STRING
WILDCARD
WILDCARD_CHAR
-OP_LESS
-OP_LESS_EQ
-OP_MORE
-OP_MORE_EQ
UNQUOTED_LITERAL_CHAR
+UNQUOTED_LITERAL_BASE_CHAR
QUOTED_CHAR
WHITESPACE
ESCAPED_WHITESPACE
@@ -68,4 +74,4 @@ mode names:
DEFAULT_MODE
atn:
-[4, 0, 13, 181, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 79, 8, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 5, 10, 90, 8, 10, 10, 10, 12, 10, 93, 9, 10, 1, 10, 4, 10, 96, 8, 10, 11, 10, 12, 10, 97, 1, 10, 5, 10, 101, 8, 10, 10, 10, 12, 10, 104, 9, 10, 1, 11, 1, 11, 5, 11, 108, 8, 11, 10, 11, 12, 11, 111, 9, 11, 1, 11, 1, 11, 1, 12, 4, 12, 116, 8, 12, 11, 12, 12, 12, 117, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 139, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 145, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 151, 8, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 161, 8, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 0, 0, 28, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 0, 29, 0, 31, 0, 33, 0, 35, 0, 37, 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 0, 1, 0, 11, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 68, 68, 100, 100, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 84, 84, 116, 116, 1, 0, 34, 34, 4, 0, 9, 10, 13, 13, 32, 32, 12288, 12288, 9, 0, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 2, 0, 85, 85, 117, 117, 3, 0, 48, 57, 65, 70, 97, 102, 185, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 3, 61, 1, 0, 0, 0, 5, 65, 1, 0, 0, 0, 7, 68, 1, 0, 0, 0, 9, 72, 1, 0, 0, 0, 11, 78, 1, 0, 0, 0, 13, 80, 1, 0, 0, 0, 15, 82, 1, 0, 0, 0, 17, 84, 1, 0, 0, 0, 19, 86, 1, 0, 0, 0, 21, 91, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 115, 1, 0, 0, 0, 27, 119, 1, 0, 0, 0, 29, 121, 1, 0, 0, 0, 31, 123, 1, 0, 0, 0, 33, 126, 1, 0, 0, 0, 35, 128, 1, 0, 0, 0, 37, 144, 1, 0, 0, 0, 39, 150, 1, 0, 0, 0, 41, 152, 1, 0, 0, 0, 43, 160, 1, 0, 0, 0, 45, 162, 1, 0, 0, 0, 47, 164, 1, 0, 0, 0, 49, 167, 1, 0, 0, 0, 51, 170, 1, 0, 0, 0, 53, 173, 1, 0, 0, 0, 55, 179, 1, 0, 0, 0, 57, 58, 3, 41, 20, 0, 58, 59, 1, 0, 0, 0, 59, 60, 6, 0, 0, 0, 60, 2, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 63, 7, 1, 0, 0, 63, 64, 7, 2, 0, 0, 64, 4, 1, 0, 0, 0, 65, 66, 7, 3, 0, 0, 66, 67, 7, 4, 0, 0, 67, 6, 1, 0, 0, 0, 68, 69, 7, 1, 0, 0, 69, 70, 7, 3, 0, 0, 70, 71, 7, 5, 0, 0, 71, 8, 1, 0, 0, 0, 72, 73, 5, 58, 0, 0, 73, 10, 1, 0, 0, 0, 74, 79, 3, 29, 14, 0, 75, 79, 3, 33, 16, 0, 76, 79, 3, 31, 15, 0, 77, 79, 3, 35, 17, 0, 78, 74, 1, 0, 0, 0, 78, 75, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 78, 77, 1, 0, 0, 0, 79, 12, 1, 0, 0, 0, 80, 81, 5, 40, 0, 0, 81, 14, 1, 0, 0, 0, 82, 83, 5, 41, 0, 0, 83, 16, 1, 0, 0, 0, 84, 85, 5, 123, 0, 0, 85, 18, 1, 0, 0, 0, 86, 87, 5, 125, 0, 0, 87, 20, 1, 0, 0, 0, 88, 90, 3, 25, 12, 0, 89, 88, 1, 0, 0, 0, 90, 93, 1, 0, 0, 0, 91, 89, 1, 0, 0, 0, 91, 92, 1, 0, 0, 0, 92, 95, 1, 0, 0, 0, 93, 91, 1, 0, 0, 0, 94, 96, 3, 37, 18, 0, 95, 94, 1, 0, 
0, 0, 96, 97, 1, 0, 0, 0, 97, 95, 1, 0, 0, 0, 97, 98, 1, 0, 0, 0, 98, 102, 1, 0, 0, 0, 99, 101, 3, 25, 12, 0, 100, 99, 1, 0, 0, 0, 101, 104, 1, 0, 0, 0, 102, 100, 1, 0, 0, 0, 102, 103, 1, 0, 0, 0, 103, 22, 1, 0, 0, 0, 104, 102, 1, 0, 0, 0, 105, 109, 5, 34, 0, 0, 106, 108, 3, 39, 19, 0, 107, 106, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 112, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 113, 5, 34, 0, 0, 113, 24, 1, 0, 0, 0, 114, 116, 3, 27, 13, 0, 115, 114, 1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 26, 1, 0, 0, 0, 119, 120, 5, 42, 0, 0, 120, 28, 1, 0, 0, 0, 121, 122, 5, 60, 0, 0, 122, 30, 1, 0, 0, 0, 123, 124, 5, 60, 0, 0, 124, 125, 5, 61, 0, 0, 125, 32, 1, 0, 0, 0, 126, 127, 5, 62, 0, 0, 127, 34, 1, 0, 0, 0, 128, 129, 5, 62, 0, 0, 129, 130, 5, 61, 0, 0, 130, 36, 1, 0, 0, 0, 131, 145, 3, 43, 21, 0, 132, 145, 3, 47, 23, 0, 133, 145, 3, 51, 25, 0, 134, 138, 5, 92, 0, 0, 135, 139, 3, 3, 1, 0, 136, 139, 3, 5, 2, 0, 137, 139, 3, 7, 3, 0, 138, 135, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 137, 1, 0, 0, 0, 139, 145, 1, 0, 0, 0, 140, 141, 3, 27, 13, 0, 141, 142, 3, 37, 18, 0, 142, 145, 1, 0, 0, 0, 143, 145, 3, 45, 22, 0, 144, 131, 1, 0, 0, 0, 144, 132, 1, 0, 0, 0, 144, 133, 1, 0, 0, 0, 144, 134, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 143, 1, 0, 0, 0, 145, 38, 1, 0, 0, 0, 146, 151, 3, 43, 21, 0, 147, 151, 3, 51, 25, 0, 148, 151, 3, 49, 24, 0, 149, 151, 8, 6, 0, 0, 150, 146, 1, 0, 0, 0, 150, 147, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 150, 149, 1, 0, 0, 0, 151, 40, 1, 0, 0, 0, 152, 153, 7, 7, 0, 0, 153, 42, 1, 0, 0, 0, 154, 155, 5, 92, 0, 0, 155, 161, 7, 4, 0, 0, 156, 157, 5, 92, 0, 0, 157, 161, 7, 5, 0, 0, 158, 159, 5, 92, 0, 0, 159, 161, 7, 1, 0, 0, 160, 154, 1, 0, 0, 0, 160, 156, 1, 0, 0, 0, 160, 158, 1, 0, 0, 0, 161, 44, 1, 0, 0, 0, 162, 163, 8, 8, 0, 0, 163, 46, 1, 0, 0, 0, 164, 165, 5, 92, 0, 0, 165, 166, 7, 8, 0, 0, 166, 48, 1, 0, 0, 0, 167, 168, 5, 92, 0, 0, 168, 169, 5, 34, 0, 0, 169, 50, 1, 0, 0, 0, 170, 171, 5, 92, 0, 0, 171, 172, 3, 53, 26, 0, 172, 52, 1, 0, 0, 0, 173, 174, 7, 9, 0, 0, 174, 175, 3, 55, 27, 0, 175, 176, 3, 55, 27, 0, 176, 177, 3, 55, 27, 0, 177, 178, 3, 55, 27, 0, 178, 54, 1, 0, 0, 0, 179, 180, 7, 10, 0, 0, 180, 56, 1, 0, 0, 0, 11, 0, 78, 91, 97, 102, 109, 117, 138, 144, 150, 160, 1, 6, 0, 0]
\ No newline at end of file
+[4, 0, 16, 178, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 4, 13, 94, 8, 13, 11, 13, 12, 13, 95, 1, 14, 1, 14, 5, 14, 100, 8, 14, 10, 14, 12, 14, 103, 9, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 5, 17, 112, 8, 17, 10, 17, 12, 17, 115, 9, 17, 1, 17, 1, 17, 5, 17, 119, 8, 17, 10, 17, 12, 17, 122, 9, 17, 1, 17, 1, 17, 4, 17, 126, 8, 17, 11, 17, 12, 17, 127, 3, 17, 130, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 139, 8, 18, 1, 18, 3, 18, 142, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 148, 8, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 158, 8, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 0, 0, 28, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 0, 35, 0, 37, 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 0, 1, 0, 12, 2, 0, 65, 65, 97, 97, 2, 0, 78, 78, 110, 110, 2, 0, 68, 68, 100, 100, 2, 0, 79, 79, 111, 111, 2, 0, 82, 82, 114, 114, 2, 0, 84, 84, 116, 116, 1, 0, 34, 34, 4, 0, 9, 10, 13, 13, 32, 32, 12288, 12288, 12, 0, 9, 10, 13, 13, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 12288, 12288, 9, 0, 32, 32, 34, 34, 40, 42, 58, 58, 60, 60, 62, 62, 92, 92, 123, 123, 125, 125, 2, 0, 85, 85, 117, 117, 3, 0, 48, 57, 65, 70, 97, 102, 182, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 3, 61, 1, 0, 0, 0, 5, 65, 1, 0, 0, 0, 7, 68, 1, 0, 0, 0, 9, 72, 1, 0, 0, 0, 11, 74, 1, 0, 0, 0, 13, 76, 1, 0, 0, 0, 15, 79, 1, 0, 0, 0, 17, 81, 1, 0, 0, 0, 19, 84, 1, 0, 0, 0, 21, 86, 1, 0, 0, 0, 23, 88, 1, 0, 0, 0, 25, 90, 1, 0, 0, 0, 27, 93, 1, 0, 0, 0, 29, 97, 1, 0, 0, 0, 31, 106, 1, 0, 0, 0, 33, 108, 1, 0, 0, 0, 35, 129, 1, 0, 0, 0, 37, 141, 1, 0, 0, 0, 39, 147, 1, 0, 0, 0, 41, 149, 1, 0, 0, 0, 43, 157, 1, 0, 0, 0, 45, 159, 1, 0, 0, 0, 47, 161, 1, 0, 0, 0, 49, 164, 1, 0, 0, 0, 51, 167, 1, 0, 0, 0, 53, 170, 1, 0, 0, 0, 55, 176, 1, 0, 0, 0, 57, 58, 3, 41, 20, 0, 58, 59, 1, 0, 0, 0, 59, 60, 6, 0, 0, 0, 60, 2, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 63, 7, 1, 0, 0, 63, 64, 7, 2, 0, 0, 64, 4, 1, 0, 0, 0, 65, 66, 7, 3, 0, 0, 66, 67, 7, 4, 0, 0, 67, 6, 1, 0, 0, 0, 68, 69, 7, 1, 0, 0, 69, 70, 7, 3, 0, 0, 70, 71, 7, 5, 0, 0, 71, 8, 1, 0, 0, 0, 72, 73, 5, 58, 0, 0, 73, 10, 1, 0, 0, 0, 74, 75, 5, 60, 0, 0, 75, 12, 1, 0, 0, 0, 76, 77, 5, 60, 0, 0, 77, 78, 5, 61, 0, 0, 78, 14, 1, 0, 0, 0, 79, 80, 5, 62, 0, 0, 80, 16, 1, 0, 0, 0, 81, 82, 5, 62, 0, 0, 82, 83, 5, 61, 0, 0, 83, 18, 1, 0, 0, 0, 84, 85, 5, 40, 0, 0, 85, 20, 1, 0, 0, 0, 86, 87, 5, 41, 0, 0, 87, 22, 1, 0, 0, 0, 88, 89, 5, 123, 0, 0, 89, 24, 1, 0, 0, 0, 90, 91, 5, 125, 0, 0, 91, 26, 1, 0, 0, 0, 92, 94, 
3, 35, 17, 0, 93, 92, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 93, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 28, 1, 0, 0, 0, 97, 101, 5, 34, 0, 0, 98, 100, 3, 39, 19, 0, 99, 98, 1, 0, 0, 0, 100, 103, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, 101, 102, 1, 0, 0, 0, 102, 104, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 104, 105, 5, 34, 0, 0, 105, 30, 1, 0, 0, 0, 106, 107, 3, 33, 16, 0, 107, 32, 1, 0, 0, 0, 108, 109, 5, 42, 0, 0, 109, 34, 1, 0, 0, 0, 110, 112, 3, 33, 16, 0, 111, 110, 1, 0, 0, 0, 112, 115, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 116, 1, 0, 0, 0, 115, 113, 1, 0, 0, 0, 116, 120, 3, 37, 18, 0, 117, 119, 3, 33, 16, 0, 118, 117, 1, 0, 0, 0, 119, 122, 1, 0, 0, 0, 120, 118, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 130, 1, 0, 0, 0, 122, 120, 1, 0, 0, 0, 123, 125, 3, 33, 16, 0, 124, 126, 3, 33, 16, 0, 125, 124, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 130, 1, 0, 0, 0, 129, 113, 1, 0, 0, 0, 129, 123, 1, 0, 0, 0, 130, 36, 1, 0, 0, 0, 131, 142, 3, 43, 21, 0, 132, 142, 3, 47, 23, 0, 133, 142, 3, 51, 25, 0, 134, 138, 5, 92, 0, 0, 135, 139, 3, 3, 1, 0, 136, 139, 3, 5, 2, 0, 137, 139, 3, 7, 3, 0, 138, 135, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 137, 1, 0, 0, 0, 139, 142, 1, 0, 0, 0, 140, 142, 3, 45, 22, 0, 141, 131, 1, 0, 0, 0, 141, 132, 1, 0, 0, 0, 141, 133, 1, 0, 0, 0, 141, 134, 1, 0, 0, 0, 141, 140, 1, 0, 0, 0, 142, 38, 1, 0, 0, 0, 143, 148, 3, 43, 21, 0, 144, 148, 3, 51, 25, 0, 145, 148, 3, 49, 24, 0, 146, 148, 8, 6, 0, 0, 147, 143, 1, 0, 0, 0, 147, 144, 1, 0, 0, 0, 147, 145, 1, 0, 0, 0, 147, 146, 1, 0, 0, 0, 148, 40, 1, 0, 0, 0, 149, 150, 7, 7, 0, 0, 150, 42, 1, 0, 0, 0, 151, 152, 5, 92, 0, 0, 152, 158, 7, 4, 0, 0, 153, 154, 5, 92, 0, 0, 154, 158, 7, 5, 0, 0, 155, 156, 5, 92, 0, 0, 156, 158, 7, 1, 0, 0, 157, 151, 1, 0, 0, 0, 157, 153, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 158, 44, 1, 0, 0, 0, 159, 160, 8, 8, 0, 0, 160, 46, 1, 0, 0, 0, 161, 162, 5, 92, 0, 0, 162, 163, 7, 9, 0, 0, 163, 48, 1, 0, 0, 0, 164, 165, 5, 92, 0, 0, 165, 166, 5, 34, 0, 0, 166, 50, 1, 0, 0, 0, 167, 168, 5, 92, 0, 0, 168, 169, 3, 53, 26, 0, 169, 52, 1, 0, 0, 0, 170, 171, 7, 10, 0, 0, 171, 172, 3, 55, 27, 0, 172, 173, 3, 55, 27, 0, 173, 174, 3, 55, 27, 0, 174, 175, 3, 55, 27, 0, 175, 54, 1, 0, 0, 0, 176, 177, 7, 11, 0, 0, 177, 56, 1, 0, 0, 0, 11, 0, 95, 101, 113, 120, 127, 129, 138, 141, 147, 157, 1, 6, 0, 0]
\ No newline at end of file
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java
index b397a412d5e8e..f9353afd6e114 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseLexer.java
@@ -25,9 +25,9 @@ class KqlBaseLexer extends Lexer {
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
- DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_COMPARE=6, LEFT_PARENTHESIS=7,
- RIGHT_PARENTHESIS=8, LEFT_CURLY_BRACKET=9, RIGHT_CURLY_BRACKET=10, UNQUOTED_LITERAL=11,
- QUOTED_STRING=12, WILDCARD=13;
+ DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_LESS=6, OP_LESS_EQ=7,
+ OP_MORE=8, OP_MORE_EQ=9, LEFT_PARENTHESIS=10, RIGHT_PARENTHESIS=11, LEFT_CURLY_BRACKET=12,
+ RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
@@ -38,28 +38,29 @@ class KqlBaseLexer extends Lexer {
private static String[] makeRuleNames() {
return new String[] {
- "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS",
- "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL",
- "QUOTED_STRING", "WILDCARD", "WILDCARD_CHAR", "OP_LESS", "OP_LESS_EQ",
- "OP_MORE", "OP_MORE_EQ", "UNQUOTED_LITERAL_CHAR", "QUOTED_CHAR", "WHITESPACE",
- "ESCAPED_WHITESPACE", "NON_SPECIAL_CHAR", "ESCAPED_SPECIAL_CHAR", "ESCAPED_QUOTE",
- "ESCAPE_UNICODE_SEQUENCE", "UNICODE_SEQUENCE", "HEX_DIGIT"
+ "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ",
+ "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET",
+ "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD",
+ "WILDCARD_CHAR", "UNQUOTED_LITERAL_CHAR", "UNQUOTED_LITERAL_BASE_CHAR",
+ "QUOTED_CHAR", "WHITESPACE", "ESCAPED_WHITESPACE", "NON_SPECIAL_CHAR",
+ "ESCAPED_SPECIAL_CHAR", "ESCAPED_QUOTE", "ESCAPE_UNICODE_SEQUENCE", "UNICODE_SEQUENCE",
+ "HEX_DIGIT"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
- null, null, "'and'", "'or'", "'not'", "':'", null, "'('", "')'", "'{'",
- "'}'"
+ null, null, "'and'", "'or'", "'not'", "':'", "'<'", "'<='", "'>'", "'>='",
+ "'('", "')'", "'{'", "'}'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
- null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS",
- "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL",
- "QUOTED_STRING", "WILDCARD"
+ null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ",
+ "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET",
+ "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
@@ -121,119 +122,119 @@ public KqlBaseLexer(CharStream input) {
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
- "\u0004\u0000\r\u00b5\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001"+
- "\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004"+
- "\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007"+
- "\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b"+
- "\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002"+
- "\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002"+
- "\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002"+
- "\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002"+
- "\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002"+
- "\u001b\u0007\u001b\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+
- "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+
- "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+
- "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005O\b"+
- "\u0005\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+
- "\u0001\t\u0001\t\u0001\n\u0005\nZ\b\n\n\n\f\n]\t\n\u0001\n\u0004\n`\b"+
- "\n\u000b\n\f\na\u0001\n\u0005\ne\b\n\n\n\f\nh\t\n\u0001\u000b\u0001\u000b"+
- "\u0005\u000bl\b\u000b\n\u000b\f\u000bo\t\u000b\u0001\u000b\u0001\u000b"+
- "\u0001\f\u0004\ft\b\f\u000b\f\f\fu\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+
- "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0011"+
- "\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+
- "\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u008b\b\u0012\u0001\u0012"+
- "\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0091\b\u0012\u0001\u0013"+
- "\u0001\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u0097\b\u0013\u0001\u0014"+
- "\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+
- "\u0001\u0015\u0003\u0015\u00a1\b\u0015\u0001\u0016\u0001\u0016\u0001\u0017"+
- "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019"+
- "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+
- "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0000\u0000\u001c\u0001"+
- "\u0001\u0003\u0002\u0005\u0003\u0007\u0004\t\u0005\u000b\u0006\r\u0007"+
- "\u000f\b\u0011\t\u0013\n\u0015\u000b\u0017\f\u0019\r\u001b\u0000\u001d"+
- "\u0000\u001f\u0000!\u0000#\u0000%\u0000\'\u0000)\u0000+\u0000-\u0000/"+
- "\u00001\u00003\u00005\u00007\u0000\u0001\u0000\u000b\u0002\u0000AAaa\u0002"+
- "\u0000NNnn\u0002\u0000DDdd\u0002\u0000OOoo\u0002\u0000RRrr\u0002\u0000"+
- "TTtt\u0001\u0000\"\"\u0004\u0000\t\n\r\r \u3000\u3000\t\u0000 \"\"("+
- "*::<<>>\\\\{{}}\u0002\u0000UUuu\u0003\u000009AFaf\u00b9\u0000\u0001\u0001"+
- "\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005\u0001"+
- "\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001\u0000"+
- "\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000"+
- "\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000"+
- "\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000"+
- "\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000"+
- "\u0000\u00019\u0001\u0000\u0000\u0000\u0003=\u0001\u0000\u0000\u0000\u0005"+
- "A\u0001\u0000\u0000\u0000\u0007D\u0001\u0000\u0000\u0000\tH\u0001\u0000"+
- "\u0000\u0000\u000bN\u0001\u0000\u0000\u0000\rP\u0001\u0000\u0000\u0000"+
- "\u000fR\u0001\u0000\u0000\u0000\u0011T\u0001\u0000\u0000\u0000\u0013V"+
- "\u0001\u0000\u0000\u0000\u0015[\u0001\u0000\u0000\u0000\u0017i\u0001\u0000"+
- "\u0000\u0000\u0019s\u0001\u0000\u0000\u0000\u001bw\u0001\u0000\u0000\u0000"+
- "\u001dy\u0001\u0000\u0000\u0000\u001f{\u0001\u0000\u0000\u0000!~\u0001"+
- "\u0000\u0000\u0000#\u0080\u0001\u0000\u0000\u0000%\u0090\u0001\u0000\u0000"+
- "\u0000\'\u0096\u0001\u0000\u0000\u0000)\u0098\u0001\u0000\u0000\u0000"+
- "+\u00a0\u0001\u0000\u0000\u0000-\u00a2\u0001\u0000\u0000\u0000/\u00a4"+
- "\u0001\u0000\u0000\u00001\u00a7\u0001\u0000\u0000\u00003\u00aa\u0001\u0000"+
- "\u0000\u00005\u00ad\u0001\u0000\u0000\u00007\u00b3\u0001\u0000\u0000\u0000"+
- "9:\u0003)\u0014\u0000:;\u0001\u0000\u0000\u0000;<\u0006\u0000\u0000\u0000"+
- "<\u0002\u0001\u0000\u0000\u0000=>\u0007\u0000\u0000\u0000>?\u0007\u0001"+
- "\u0000\u0000?@\u0007\u0002\u0000\u0000@\u0004\u0001\u0000\u0000\u0000"+
- "AB\u0007\u0003\u0000\u0000BC\u0007\u0004\u0000\u0000C\u0006\u0001\u0000"+
- "\u0000\u0000DE\u0007\u0001\u0000\u0000EF\u0007\u0003\u0000\u0000FG\u0007"+
- "\u0005\u0000\u0000G\b\u0001\u0000\u0000\u0000HI\u0005:\u0000\u0000I\n"+
- "\u0001\u0000\u0000\u0000JO\u0003\u001d\u000e\u0000KO\u0003!\u0010\u0000"+
- "LO\u0003\u001f\u000f\u0000MO\u0003#\u0011\u0000NJ\u0001\u0000\u0000\u0000"+
- "NK\u0001\u0000\u0000\u0000NL\u0001\u0000\u0000\u0000NM\u0001\u0000\u0000"+
- "\u0000O\f\u0001\u0000\u0000\u0000PQ\u0005(\u0000\u0000Q\u000e\u0001\u0000"+
- "\u0000\u0000RS\u0005)\u0000\u0000S\u0010\u0001\u0000\u0000\u0000TU\u0005"+
- "{\u0000\u0000U\u0012\u0001\u0000\u0000\u0000VW\u0005}\u0000\u0000W\u0014"+
- "\u0001\u0000\u0000\u0000XZ\u0003\u0019\f\u0000YX\u0001\u0000\u0000\u0000"+
- "Z]\u0001\u0000\u0000\u0000[Y\u0001\u0000\u0000\u0000[\\\u0001\u0000\u0000"+
- "\u0000\\_\u0001\u0000\u0000\u0000][\u0001\u0000\u0000\u0000^`\u0003%\u0012"+
- "\u0000_^\u0001\u0000\u0000\u0000`a\u0001\u0000\u0000\u0000a_\u0001\u0000"+
- "\u0000\u0000ab\u0001\u0000\u0000\u0000bf\u0001\u0000\u0000\u0000ce\u0003"+
- "\u0019\f\u0000dc\u0001\u0000\u0000\u0000eh\u0001\u0000\u0000\u0000fd\u0001"+
- "\u0000\u0000\u0000fg\u0001\u0000\u0000\u0000g\u0016\u0001\u0000\u0000"+
- "\u0000hf\u0001\u0000\u0000\u0000im\u0005\"\u0000\u0000jl\u0003\'\u0013"+
- "\u0000kj\u0001\u0000\u0000\u0000lo\u0001\u0000\u0000\u0000mk\u0001\u0000"+
- "\u0000\u0000mn\u0001\u0000\u0000\u0000np\u0001\u0000\u0000\u0000om\u0001"+
- "\u0000\u0000\u0000pq\u0005\"\u0000\u0000q\u0018\u0001\u0000\u0000\u0000"+
- "rt\u0003\u001b\r\u0000sr\u0001\u0000\u0000\u0000tu\u0001\u0000\u0000\u0000"+
- "us\u0001\u0000\u0000\u0000uv\u0001\u0000\u0000\u0000v\u001a\u0001\u0000"+
- "\u0000\u0000wx\u0005*\u0000\u0000x\u001c\u0001\u0000\u0000\u0000yz\u0005"+
- "<\u0000\u0000z\u001e\u0001\u0000\u0000\u0000{|\u0005<\u0000\u0000|}\u0005"+
- "=\u0000\u0000} \u0001\u0000\u0000\u0000~\u007f\u0005>\u0000\u0000\u007f"+
- "\"\u0001\u0000\u0000\u0000\u0080\u0081\u0005>\u0000\u0000\u0081\u0082"+
- "\u0005=\u0000\u0000\u0082$\u0001\u0000\u0000\u0000\u0083\u0091\u0003+"+
- "\u0015\u0000\u0084\u0091\u0003/\u0017\u0000\u0085\u0091\u00033\u0019\u0000"+
- "\u0086\u008a\u0005\\\u0000\u0000\u0087\u008b\u0003\u0003\u0001\u0000\u0088"+
- "\u008b\u0003\u0005\u0002\u0000\u0089\u008b\u0003\u0007\u0003\u0000\u008a"+
- "\u0087\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000\u008a"+
- "\u0089\u0001\u0000\u0000\u0000\u008b\u0091\u0001\u0000\u0000\u0000\u008c"+
- "\u008d\u0003\u001b\r\u0000\u008d\u008e\u0003%\u0012\u0000\u008e\u0091"+
- "\u0001\u0000\u0000\u0000\u008f\u0091\u0003-\u0016\u0000\u0090\u0083\u0001"+
- "\u0000\u0000\u0000\u0090\u0084\u0001\u0000\u0000\u0000\u0090\u0085\u0001"+
- "\u0000\u0000\u0000\u0090\u0086\u0001\u0000\u0000\u0000\u0090\u008c\u0001"+
- "\u0000\u0000\u0000\u0090\u008f\u0001\u0000\u0000\u0000\u0091&\u0001\u0000"+
- "\u0000\u0000\u0092\u0097\u0003+\u0015\u0000\u0093\u0097\u00033\u0019\u0000"+
- "\u0094\u0097\u00031\u0018\u0000\u0095\u0097\b\u0006\u0000\u0000\u0096"+
- "\u0092\u0001\u0000\u0000\u0000\u0096\u0093\u0001\u0000\u0000\u0000\u0096"+
- "\u0094\u0001\u0000\u0000\u0000\u0096\u0095\u0001\u0000\u0000\u0000\u0097"+
- "(\u0001\u0000\u0000\u0000\u0098\u0099\u0007\u0007\u0000\u0000\u0099*\u0001"+
- "\u0000\u0000\u0000\u009a\u009b\u0005\\\u0000\u0000\u009b\u00a1\u0007\u0004"+
- "\u0000\u0000\u009c\u009d\u0005\\\u0000\u0000\u009d\u00a1\u0007\u0005\u0000"+
- "\u0000\u009e\u009f\u0005\\\u0000\u0000\u009f\u00a1\u0007\u0001\u0000\u0000"+
- "\u00a0\u009a\u0001\u0000\u0000\u0000\u00a0\u009c\u0001\u0000\u0000\u0000"+
- "\u00a0\u009e\u0001\u0000\u0000\u0000\u00a1,\u0001\u0000\u0000\u0000\u00a2"+
- "\u00a3\b\b\u0000\u0000\u00a3.\u0001\u0000\u0000\u0000\u00a4\u00a5\u0005"+
- "\\\u0000\u0000\u00a5\u00a6\u0007\b\u0000\u0000\u00a60\u0001\u0000\u0000"+
- "\u0000\u00a7\u00a8\u0005\\\u0000\u0000\u00a8\u00a9\u0005\"\u0000\u0000"+
- "\u00a92\u0001\u0000\u0000\u0000\u00aa\u00ab\u0005\\\u0000\u0000\u00ab"+
- "\u00ac\u00035\u001a\u0000\u00ac4\u0001\u0000\u0000\u0000\u00ad\u00ae\u0007"+
- "\t\u0000\u0000\u00ae\u00af\u00037\u001b\u0000\u00af\u00b0\u00037\u001b"+
- "\u0000\u00b0\u00b1\u00037\u001b\u0000\u00b1\u00b2\u00037\u001b\u0000\u00b2"+
- "6\u0001\u0000\u0000\u0000\u00b3\u00b4\u0007\n\u0000\u0000\u00b48\u0001"+
- "\u0000\u0000\u0000\u000b\u0000N[afmu\u008a\u0090\u0096\u00a0\u0001\u0006"+
- "\u0000\u0000";
+ "\u0004\u0000\u0010\u00b2\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002"+
+ "\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002"+
+ "\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002"+
+ "\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002"+
+ "\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e"+
+ "\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011"+
+ "\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014"+
+ "\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017"+
+ "\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a"+
+ "\u0002\u001b\u0007\u001b\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+
+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002"+
+ "\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+
+ "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006"+
+ "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+
+ "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0004\r^\b"+
+ "\r\u000b\r\f\r_\u0001\u000e\u0001\u000e\u0005\u000ed\b\u000e\n\u000e\f"+
+ "\u000eg\t\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+
+ "\u0010\u0001\u0010\u0001\u0011\u0005\u0011p\b\u0011\n\u0011\f\u0011s\t"+
+ "\u0011\u0001\u0011\u0001\u0011\u0005\u0011w\b\u0011\n\u0011\f\u0011z\t"+
+ "\u0011\u0001\u0011\u0001\u0011\u0004\u0011~\b\u0011\u000b\u0011\f\u0011"+
+ "\u007f\u0003\u0011\u0082\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+
+ "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u008b\b\u0012\u0001"+
+ "\u0012\u0003\u0012\u008e\b\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+
+ "\u0013\u0003\u0013\u0094\b\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001"+
+ "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0003\u0015\u009e"+
+ "\b\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+
+ "\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+
+ "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+
+ "\u001b\u0001\u001b\u0000\u0000\u001c\u0001\u0001\u0003\u0002\u0005\u0003"+
+ "\u0007\u0004\t\u0005\u000b\u0006\r\u0007\u000f\b\u0011\t\u0013\n\u0015"+
+ "\u000b\u0017\f\u0019\r\u001b\u000e\u001d\u000f\u001f\u0010!\u0000#\u0000"+
+ "%\u0000\'\u0000)\u0000+\u0000-\u0000/\u00001\u00003\u00005\u00007\u0000"+
+ "\u0001\u0000\f\u0002\u0000AAaa\u0002\u0000NNnn\u0002\u0000DDdd\u0002\u0000"+
+ "OOoo\u0002\u0000RRrr\u0002\u0000TTtt\u0001\u0000\"\"\u0004\u0000\t\n\r"+
+ "\r \u3000\u3000\f\u0000\t\n\r\r \"\"(*::<<>>\\\\{{}}\u3000\u3000\t\u0000"+
+ " \"\"(*::<<>>\\\\{{}}\u0002\u0000UUuu\u0003\u000009AFaf\u00b6\u0000\u0001"+
+ "\u0001\u0000\u0000\u0000\u0000\u0003\u0001\u0000\u0000\u0000\u0000\u0005"+
+ "\u0001\u0000\u0000\u0000\u0000\u0007\u0001\u0000\u0000\u0000\u0000\t\u0001"+
+ "\u0000\u0000\u0000\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000"+
+ "\u0000\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001\u0000"+
+ "\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001\u0000"+
+ "\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001\u0000"+
+ "\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001\u0000"+
+ "\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u00019\u0001\u0000\u0000"+
+ "\u0000\u0003=\u0001\u0000\u0000\u0000\u0005A\u0001\u0000\u0000\u0000\u0007"+
+ "D\u0001\u0000\u0000\u0000\tH\u0001\u0000\u0000\u0000\u000bJ\u0001\u0000"+
+ "\u0000\u0000\rL\u0001\u0000\u0000\u0000\u000fO\u0001\u0000\u0000\u0000"+
+ "\u0011Q\u0001\u0000\u0000\u0000\u0013T\u0001\u0000\u0000\u0000\u0015V"+
+ "\u0001\u0000\u0000\u0000\u0017X\u0001\u0000\u0000\u0000\u0019Z\u0001\u0000"+
+ "\u0000\u0000\u001b]\u0001\u0000\u0000\u0000\u001da\u0001\u0000\u0000\u0000"+
+ "\u001fj\u0001\u0000\u0000\u0000!l\u0001\u0000\u0000\u0000#\u0081\u0001"+
+ "\u0000\u0000\u0000%\u008d\u0001\u0000\u0000\u0000\'\u0093\u0001\u0000"+
+ "\u0000\u0000)\u0095\u0001\u0000\u0000\u0000+\u009d\u0001\u0000\u0000\u0000"+
+ "-\u009f\u0001\u0000\u0000\u0000/\u00a1\u0001\u0000\u0000\u00001\u00a4"+
+ "\u0001\u0000\u0000\u00003\u00a7\u0001\u0000\u0000\u00005\u00aa\u0001\u0000"+
+ "\u0000\u00007\u00b0\u0001\u0000\u0000\u00009:\u0003)\u0014\u0000:;\u0001"+
+ "\u0000\u0000\u0000;<\u0006\u0000\u0000\u0000<\u0002\u0001\u0000\u0000"+
+ "\u0000=>\u0007\u0000\u0000\u0000>?\u0007\u0001\u0000\u0000?@\u0007\u0002"+
+ "\u0000\u0000@\u0004\u0001\u0000\u0000\u0000AB\u0007\u0003\u0000\u0000"+
+ "BC\u0007\u0004\u0000\u0000C\u0006\u0001\u0000\u0000\u0000DE\u0007\u0001"+
+ "\u0000\u0000EF\u0007\u0003\u0000\u0000FG\u0007\u0005\u0000\u0000G\b\u0001"+
+ "\u0000\u0000\u0000HI\u0005:\u0000\u0000I\n\u0001\u0000\u0000\u0000JK\u0005"+
+ "<\u0000\u0000K\f\u0001\u0000\u0000\u0000LM\u0005<\u0000\u0000MN\u0005"+
+ "=\u0000\u0000N\u000e\u0001\u0000\u0000\u0000OP\u0005>\u0000\u0000P\u0010"+
+ "\u0001\u0000\u0000\u0000QR\u0005>\u0000\u0000RS\u0005=\u0000\u0000S\u0012"+
+ "\u0001\u0000\u0000\u0000TU\u0005(\u0000\u0000U\u0014\u0001\u0000\u0000"+
+ "\u0000VW\u0005)\u0000\u0000W\u0016\u0001\u0000\u0000\u0000XY\u0005{\u0000"+
+ "\u0000Y\u0018\u0001\u0000\u0000\u0000Z[\u0005}\u0000\u0000[\u001a\u0001"+
+ "\u0000\u0000\u0000\\^\u0003#\u0011\u0000]\\\u0001\u0000\u0000\u0000^_"+
+ "\u0001\u0000\u0000\u0000_]\u0001\u0000\u0000\u0000_`\u0001\u0000\u0000"+
+ "\u0000`\u001c\u0001\u0000\u0000\u0000ae\u0005\"\u0000\u0000bd\u0003\'"+
+ "\u0013\u0000cb\u0001\u0000\u0000\u0000dg\u0001\u0000\u0000\u0000ec\u0001"+
+ "\u0000\u0000\u0000ef\u0001\u0000\u0000\u0000fh\u0001\u0000\u0000\u0000"+
+ "ge\u0001\u0000\u0000\u0000hi\u0005\"\u0000\u0000i\u001e\u0001\u0000\u0000"+
+ "\u0000jk\u0003!\u0010\u0000k \u0001\u0000\u0000\u0000lm\u0005*\u0000\u0000"+
+ "m\"\u0001\u0000\u0000\u0000np\u0003!\u0010\u0000on\u0001\u0000\u0000\u0000"+
+ "ps\u0001\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000qr\u0001\u0000\u0000"+
+ "\u0000rt\u0001\u0000\u0000\u0000sq\u0001\u0000\u0000\u0000tx\u0003%\u0012"+
+ "\u0000uw\u0003!\u0010\u0000vu\u0001\u0000\u0000\u0000wz\u0001\u0000\u0000"+
+ "\u0000xv\u0001\u0000\u0000\u0000xy\u0001\u0000\u0000\u0000y\u0082\u0001"+
+ "\u0000\u0000\u0000zx\u0001\u0000\u0000\u0000{}\u0003!\u0010\u0000|~\u0003"+
+ "!\u0010\u0000}|\u0001\u0000\u0000\u0000~\u007f\u0001\u0000\u0000\u0000"+
+ "\u007f}\u0001\u0000\u0000\u0000\u007f\u0080\u0001\u0000\u0000\u0000\u0080"+
+ "\u0082\u0001\u0000\u0000\u0000\u0081q\u0001\u0000\u0000\u0000\u0081{\u0001"+
+ "\u0000\u0000\u0000\u0082$\u0001\u0000\u0000\u0000\u0083\u008e\u0003+\u0015"+
+ "\u0000\u0084\u008e\u0003/\u0017\u0000\u0085\u008e\u00033\u0019\u0000\u0086"+
+ "\u008a\u0005\\\u0000\u0000\u0087\u008b\u0003\u0003\u0001\u0000\u0088\u008b"+
+ "\u0003\u0005\u0002\u0000\u0089\u008b\u0003\u0007\u0003\u0000\u008a\u0087"+
+ "\u0001\u0000\u0000\u0000\u008a\u0088\u0001\u0000\u0000\u0000\u008a\u0089"+
+ "\u0001\u0000\u0000\u0000\u008b\u008e\u0001\u0000\u0000\u0000\u008c\u008e"+
+ "\u0003-\u0016\u0000\u008d\u0083\u0001\u0000\u0000\u0000\u008d\u0084\u0001"+
+ "\u0000\u0000\u0000\u008d\u0085\u0001\u0000\u0000\u0000\u008d\u0086\u0001"+
+ "\u0000\u0000\u0000\u008d\u008c\u0001\u0000\u0000\u0000\u008e&\u0001\u0000"+
+ "\u0000\u0000\u008f\u0094\u0003+\u0015\u0000\u0090\u0094\u00033\u0019\u0000"+
+ "\u0091\u0094\u00031\u0018\u0000\u0092\u0094\b\u0006\u0000\u0000\u0093"+
+ "\u008f\u0001\u0000\u0000\u0000\u0093\u0090\u0001\u0000\u0000\u0000\u0093"+
+ "\u0091\u0001\u0000\u0000\u0000\u0093\u0092\u0001\u0000\u0000\u0000\u0094"+
+ "(\u0001\u0000\u0000\u0000\u0095\u0096\u0007\u0007\u0000\u0000\u0096*\u0001"+
+ "\u0000\u0000\u0000\u0097\u0098\u0005\\\u0000\u0000\u0098\u009e\u0007\u0004"+
+ "\u0000\u0000\u0099\u009a\u0005\\\u0000\u0000\u009a\u009e\u0007\u0005\u0000"+
+ "\u0000\u009b\u009c\u0005\\\u0000\u0000\u009c\u009e\u0007\u0001\u0000\u0000"+
+ "\u009d\u0097\u0001\u0000\u0000\u0000\u009d\u0099\u0001\u0000\u0000\u0000"+
+ "\u009d\u009b\u0001\u0000\u0000\u0000\u009e,\u0001\u0000\u0000\u0000\u009f"+
+ "\u00a0\b\b\u0000\u0000\u00a0.\u0001\u0000\u0000\u0000\u00a1\u00a2\u0005"+
+ "\\\u0000\u0000\u00a2\u00a3\u0007\t\u0000\u0000\u00a30\u0001\u0000\u0000"+
+ "\u0000\u00a4\u00a5\u0005\\\u0000\u0000\u00a5\u00a6\u0005\"\u0000\u0000"+
+ "\u00a62\u0001\u0000\u0000\u0000\u00a7\u00a8\u0005\\\u0000\u0000\u00a8"+
+ "\u00a9\u00035\u001a\u0000\u00a94\u0001\u0000\u0000\u0000\u00aa\u00ab\u0007"+
+ "\n\u0000\u0000\u00ab\u00ac\u00037\u001b\u0000\u00ac\u00ad\u00037\u001b"+
+ "\u0000\u00ad\u00ae\u00037\u001b\u0000\u00ae\u00af\u00037\u001b\u0000\u00af"+
+ "6\u0001\u0000\u0000\u0000\u00b0\u00b1\u0007\u000b\u0000\u0000\u00b18\u0001"+
+ "\u0000\u0000\u0000\u000b\u0000_eqx\u007f\u0081\u008a\u008d\u0093\u009d"+
+ "\u0001\u0006\u0000\u0000";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java
index bce2044fa8175..505569dbde58d 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseListener.java
@@ -71,16 +71,6 @@ interface KqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx);
- /**
- * Enter a parse tree produced by {@link KqlBaseParser#expression}.
- * @param ctx the parse tree
- */
- void enterExpression(KqlBaseParser.ExpressionContext ctx);
- /**
- * Exit a parse tree produced by {@link KqlBaseParser#expression}.
- * @param ctx the parse tree
- */
- void exitExpression(KqlBaseParser.ExpressionContext ctx);
/**
* Enter a parse tree produced by {@link KqlBaseParser#nestedQuery}.
* @param ctx the parse tree
@@ -92,45 +82,35 @@ interface KqlBaseListener extends ParseTreeListener {
*/
void exitNestedQuery(KqlBaseParser.NestedQueryContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}.
- * @param ctx the parse tree
- */
- void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx);
- /**
- * Exit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}.
- * @param ctx the parse tree
- */
- void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx);
- /**
- * Enter a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}.
+ * Enter a parse tree produced by {@link KqlBaseParser#matchAllQuery}.
* @param ctx the parse tree
*/
- void enterFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx);
+ void enterMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}.
+ * Exit a parse tree produced by {@link KqlBaseParser#matchAllQuery}.
* @param ctx the parse tree
*/
- void exitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx);
+ void exitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#fieldTermQuery}.
+ * Enter a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}.
* @param ctx the parse tree
*/
- void enterFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx);
+ void enterParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#fieldTermQuery}.
+ * Exit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}.
* @param ctx the parse tree
*/
- void exitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx);
+ void exitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#fieldName}.
+ * Enter a parse tree produced by {@link KqlBaseParser#rangeQuery}.
* @param ctx the parse tree
*/
- void enterFieldName(KqlBaseParser.FieldNameContext ctx);
+ void enterRangeQuery(KqlBaseParser.RangeQueryContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#fieldName}.
+ * Exit a parse tree produced by {@link KqlBaseParser#rangeQuery}.
* @param ctx the parse tree
*/
- void exitFieldName(KqlBaseParser.FieldNameContext ctx);
+ void exitRangeQuery(KqlBaseParser.RangeQueryContext ctx);
/**
* Enter a parse tree produced by {@link KqlBaseParser#rangeQueryValue}.
* @param ctx the parse tree
@@ -142,53 +122,53 @@ interface KqlBaseListener extends ParseTreeListener {
*/
void exitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#termQueryValue}.
+ * Enter a parse tree produced by {@link KqlBaseParser#existsQuery}.
* @param ctx the parse tree
*/
- void enterTermQueryValue(KqlBaseParser.TermQueryValueContext ctx);
+ void enterExistsQuery(KqlBaseParser.ExistsQueryContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#termQueryValue}.
+ * Exit a parse tree produced by {@link KqlBaseParser#existsQuery}.
* @param ctx the parse tree
*/
- void exitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx);
+ void exitExistsQuery(KqlBaseParser.ExistsQueryContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#groupingTermExpression}.
+ * Enter a parse tree produced by {@link KqlBaseParser#fieldQuery}.
* @param ctx the parse tree
*/
- void enterGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx);
+ void enterFieldQuery(KqlBaseParser.FieldQueryContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#groupingTermExpression}.
+ * Exit a parse tree produced by {@link KqlBaseParser#fieldQuery}.
* @param ctx the parse tree
*/
- void exitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx);
+ void exitFieldQuery(KqlBaseParser.FieldQueryContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}.
+ * Enter a parse tree produced by {@link KqlBaseParser#fieldLessQuery}.
* @param ctx the parse tree
*/
- void enterUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx);
+ void enterFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}.
+ * Exit a parse tree produced by {@link KqlBaseParser#fieldLessQuery}.
* @param ctx the parse tree
*/
- void exitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx);
+ void exitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#quotedStringExpression}.
+ * Enter a parse tree produced by {@link KqlBaseParser#fieldQueryValue}.
* @param ctx the parse tree
*/
- void enterQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx);
+ void enterFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#quotedStringExpression}.
+ * Exit a parse tree produced by {@link KqlBaseParser#fieldQueryValue}.
* @param ctx the parse tree
*/
- void exitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx);
+ void exitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx);
/**
- * Enter a parse tree produced by {@link KqlBaseParser#wildcardExpression}.
+ * Enter a parse tree produced by {@link KqlBaseParser#fieldName}.
* @param ctx the parse tree
*/
- void enterWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx);
+ void enterFieldName(KqlBaseParser.FieldNameContext ctx);
/**
- * Exit a parse tree produced by {@link KqlBaseParser#wildcardExpression}.
+ * Exit a parse tree produced by {@link KqlBaseParser#fieldName}.
* @param ctx the parse tree
*/
- void exitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx);
+ void exitFieldName(KqlBaseParser.FieldNameContext ctx);
}
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java
index 3bd9cc4104d2c..3ee44e389a371 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java
@@ -25,37 +25,35 @@ class KqlBaseParser extends Parser {
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
- DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_COMPARE=6, LEFT_PARENTHESIS=7,
- RIGHT_PARENTHESIS=8, LEFT_CURLY_BRACKET=9, RIGHT_CURLY_BRACKET=10, UNQUOTED_LITERAL=11,
- QUOTED_STRING=12, WILDCARD=13;
+ DEFAULT_SKIP=1, AND=2, OR=3, NOT=4, COLON=5, OP_LESS=6, OP_LESS_EQ=7,
+ OP_MORE=8, OP_MORE_EQ=9, LEFT_PARENTHESIS=10, RIGHT_PARENTHESIS=11, LEFT_CURLY_BRACKET=12,
+ RIGHT_CURLY_BRACKET=13, UNQUOTED_LITERAL=14, QUOTED_STRING=15, WILDCARD=16;
public static final int
- RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_expression = 3,
- RULE_nestedQuery = 4, RULE_parenthesizedQuery = 5, RULE_fieldRangeQuery = 6,
- RULE_fieldTermQuery = 7, RULE_fieldName = 8, RULE_rangeQueryValue = 9,
- RULE_termQueryValue = 10, RULE_groupingTermExpression = 11, RULE_unquotedLiteralExpression = 12,
- RULE_quotedStringExpression = 13, RULE_wildcardExpression = 14;
+ RULE_topLevelQuery = 0, RULE_query = 1, RULE_simpleQuery = 2, RULE_nestedQuery = 3,
+ RULE_matchAllQuery = 4, RULE_parenthesizedQuery = 5, RULE_rangeQuery = 6,
+ RULE_rangeQueryValue = 7, RULE_existsQuery = 8, RULE_fieldQuery = 9, RULE_fieldLessQuery = 10,
+ RULE_fieldQueryValue = 11, RULE_fieldName = 12;
private static String[] makeRuleNames() {
return new String[] {
- "topLevelQuery", "query", "simpleQuery", "expression", "nestedQuery",
- "parenthesizedQuery", "fieldRangeQuery", "fieldTermQuery", "fieldName",
- "rangeQueryValue", "termQueryValue", "groupingTermExpression", "unquotedLiteralExpression",
- "quotedStringExpression", "wildcardExpression"
+ "topLevelQuery", "query", "simpleQuery", "nestedQuery", "matchAllQuery",
+ "parenthesizedQuery", "rangeQuery", "rangeQueryValue", "existsQuery",
+ "fieldQuery", "fieldLessQuery", "fieldQueryValue", "fieldName"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
- null, null, "'and'", "'or'", "'not'", "':'", null, "'('", "')'", "'{'",
- "'}'"
+ null, null, "'and'", "'or'", "'not'", "':'", "'<'", "'<='", "'>'", "'>='",
+ "'('", "')'", "'{'", "'}'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
- null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_COMPARE", "LEFT_PARENTHESIS",
- "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET", "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL",
- "QUOTED_STRING", "WILDCARD"
+ null, "DEFAULT_SKIP", "AND", "OR", "NOT", "COLON", "OP_LESS", "OP_LESS_EQ",
+ "OP_MORE", "OP_MORE_EQ", "LEFT_PARENTHESIS", "RIGHT_PARENTHESIS", "LEFT_CURLY_BRACKET",
+ "RIGHT_CURLY_BRACKET", "UNQUOTED_LITERAL", "QUOTED_STRING", "WILDCARD"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
@@ -141,17 +139,17 @@ public final TopLevelQueryContext topLevelQuery() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(31);
+ setState(27);
_errHandler.sync(this);
_la = _input.LA(1);
- if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 14480L) != 0)) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 115740L) != 0)) {
{
- setState(30);
+ setState(26);
query(0);
}
}
- setState(33);
+ setState(29);
match(EOF);
}
}
@@ -202,6 +200,7 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
}
@SuppressWarnings("CheckReturnValue")
public static class BooleanQueryContext extends QueryContext {
+ public Token operator;
public List<QueryContext> query() {
return getRuleContexts(QueryContext.class);
}
@@ -262,38 +261,33 @@ private QueryContext query(int _p) throws RecognitionException {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(39);
+ setState(35);
_errHandler.sync(this);
- switch (_input.LA(1)) {
- case NOT:
+ switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) {
+ case 1:
{
_localctx = new NotQueryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(36);
+ setState(32);
match(NOT);
- setState(37);
+ setState(33);
((NotQueryContext)_localctx).subQuery = simpleQuery();
}
break;
- case LEFT_PARENTHESIS:
- case UNQUOTED_LITERAL:
- case QUOTED_STRING:
- case WILDCARD:
+ case 2:
{
_localctx = new DefaultQueryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(38);
+ setState(34);
simpleQuery();
}
break;
- default:
- throw new NoViableAltException(this);
}
_ctx.stop = _input.LT(-1);
- setState(46);
+ setState(42);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,2,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
@@ -304,24 +298,25 @@ private QueryContext query(int _p) throws RecognitionException {
{
_localctx = new BooleanQueryContext(new QueryContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_query);
- setState(41);
+ setState(37);
if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)");
- setState(42);
+ setState(38);
+ ((BooleanQueryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==AND || _la==OR) ) {
- _errHandler.recoverInline(this);
+ ((BooleanQueryContext)_localctx).operator = (Token)_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
- setState(43);
- query(4);
+ setState(39);
+ query(3);
}
}
}
- setState(48);
+ setState(44);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,2,_ctx);
}
@@ -343,12 +338,24 @@ public static class SimpleQueryContext extends ParserRuleContext {
public NestedQueryContext nestedQuery() {
return getRuleContext(NestedQueryContext.class,0);
}
- public ExpressionContext expression() {
- return getRuleContext(ExpressionContext.class,0);
- }
public ParenthesizedQueryContext parenthesizedQuery() {
return getRuleContext(ParenthesizedQueryContext.class,0);
}
+ public MatchAllQueryContext matchAllQuery() {
+ return getRuleContext(MatchAllQueryContext.class,0);
+ }
+ public ExistsQueryContext existsQuery() {
+ return getRuleContext(ExistsQueryContext.class,0);
+ }
+ public RangeQueryContext rangeQuery() {
+ return getRuleContext(RangeQueryContext.class,0);
+ }
+ public FieldQueryContext fieldQuery() {
+ return getRuleContext(FieldQueryContext.class,0);
+ }
+ public FieldLessQueryContext fieldLessQuery() {
+ return getRuleContext(FieldLessQueryContext.class,0);
+ }
public SimpleQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@@ -378,83 +385,50 @@ public final SimpleQueryContext simpleQuery() throws RecognitionException {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(49);
+ setState(45);
nestedQuery();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(50);
- expression();
+ setState(46);
+ parenthesizedQuery();
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
- setState(51);
- parenthesizedQuery();
+ setState(47);
+ matchAllQuery();
}
break;
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
- }
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class ExpressionContext extends ParserRuleContext {
- public FieldTermQueryContext fieldTermQuery() {
- return getRuleContext(FieldTermQueryContext.class,0);
- }
- public FieldRangeQueryContext fieldRangeQuery() {
- return getRuleContext(FieldRangeQueryContext.class,0);
- }
- public ExpressionContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_expression; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterExpression(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitExpression(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitExpression(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final ExpressionContext expression() throws RecognitionException {
- ExpressionContext _localctx = new ExpressionContext(_ctx, getState());
- enterRule(_localctx, 6, RULE_expression);
- try {
- setState(56);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) {
- case 1:
- enterOuterAlt(_localctx, 1);
+ case 4:
+ enterOuterAlt(_localctx, 4);
{
- setState(54);
- fieldTermQuery();
+ setState(48);
+ existsQuery();
}
break;
- case 2:
- enterOuterAlt(_localctx, 2);
+ case 5:
+ enterOuterAlt(_localctx, 5);
{
- setState(55);
- fieldRangeQuery();
+ setState(49);
+ rangeQuery();
+ }
+ break;
+ case 6:
+ enterOuterAlt(_localctx, 6);
+ {
+ setState(50);
+ fieldQuery();
+ }
+ break;
+ case 7:
+ enterOuterAlt(_localctx, 7);
+ {
+ setState(51);
+ fieldLessQuery();
}
break;
}
@@ -502,19 +476,19 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
public final NestedQueryContext nestedQuery() throws RecognitionException {
NestedQueryContext _localctx = new NestedQueryContext(_ctx, getState());
- enterRule(_localctx, 8, RULE_nestedQuery);
+ enterRule(_localctx, 6, RULE_nestedQuery);
try {
enterOuterAlt(_localctx, 1);
{
- setState(58);
+ setState(54);
fieldName();
- setState(59);
+ setState(55);
match(COLON);
- setState(60);
+ setState(56);
match(LEFT_CURLY_BRACKET);
- setState(61);
+ setState(57);
query(0);
- setState(62);
+ setState(58);
match(RIGHT_CURLY_BRACKET);
}
}
@@ -530,43 +504,51 @@ public final NestedQueryContext nestedQuery() throws RecognitionException {
}
@SuppressWarnings("CheckReturnValue")
- public static class ParenthesizedQueryContext extends ParserRuleContext {
- public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); }
- public QueryContext query() {
- return getRuleContext(QueryContext.class,0);
+ public static class MatchAllQueryContext extends ParserRuleContext {
+ public List<TerminalNode> WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); }
+ public TerminalNode WILDCARD(int i) {
+ return getToken(KqlBaseParser.WILDCARD, i);
}
- public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); }
- public ParenthesizedQueryContext(ParserRuleContext parent, int invokingState) {
+ public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); }
+ public MatchAllQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_parenthesizedQuery; }
+ @Override public int getRuleIndex() { return RULE_matchAllQuery; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterParenthesizedQuery(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterMatchAllQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitParenthesizedQuery(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitMatchAllQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitParenthesizedQuery(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitMatchAllQuery(this);
else return visitor.visitChildren(this);
}
}
- public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException {
- ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState());
- enterRule(_localctx, 10, RULE_parenthesizedQuery);
+ public final MatchAllQueryContext matchAllQuery() throws RecognitionException {
+ MatchAllQueryContext _localctx = new MatchAllQueryContext(_ctx, getState());
+ enterRule(_localctx, 8, RULE_matchAllQuery);
try {
enterOuterAlt(_localctx, 1);
{
+ setState(62);
+ _errHandler.sync(this);
+ switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) {
+ case 1:
+ {
+ setState(60);
+ match(WILDCARD);
+ setState(61);
+ match(COLON);
+ }
+ break;
+ }
setState(64);
- match(LEFT_PARENTHESIS);
- setState(65);
- query(0);
- setState(66);
- match(RIGHT_PARENTHESIS);
+ match(WILDCARD);
}
}
catch (RecognitionException re) {
@@ -581,46 +563,43 @@ public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionEx
}
@SuppressWarnings("CheckReturnValue")
- public static class FieldRangeQueryContext extends ParserRuleContext {
- public Token operator;
- public FieldNameContext fieldName() {
- return getRuleContext(FieldNameContext.class,0);
- }
- public RangeQueryValueContext rangeQueryValue() {
- return getRuleContext(RangeQueryValueContext.class,0);
+ public static class ParenthesizedQueryContext extends ParserRuleContext {
+ public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); }
+ public QueryContext query() {
+ return getRuleContext(QueryContext.class,0);
}
- public TerminalNode OP_COMPARE() { return getToken(KqlBaseParser.OP_COMPARE, 0); }
- public FieldRangeQueryContext(ParserRuleContext parent, int invokingState) {
+ public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); }
+ public ParenthesizedQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_fieldRangeQuery; }
+ @Override public int getRuleIndex() { return RULE_parenthesizedQuery; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldRangeQuery(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterParenthesizedQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldRangeQuery(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitParenthesizedQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldRangeQuery(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitParenthesizedQuery(this);
else return visitor.visitChildren(this);
}
}
- public final FieldRangeQueryContext fieldRangeQuery() throws RecognitionException {
- FieldRangeQueryContext _localctx = new FieldRangeQueryContext(_ctx, getState());
- enterRule(_localctx, 12, RULE_fieldRangeQuery);
+ public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException {
+ ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState());
+ enterRule(_localctx, 10, RULE_parenthesizedQuery);
try {
enterOuterAlt(_localctx, 1);
{
+ setState(66);
+ match(LEFT_PARENTHESIS);
+ setState(67);
+ query(0);
setState(68);
- fieldName();
- setState(69);
- ((FieldRangeQueryContext)_localctx).operator = match(OP_COMPARE);
- setState(70);
- rangeQueryValue();
+ match(RIGHT_PARENTHESIS);
}
}
catch (RecognitionException re) {
@@ -635,53 +614,59 @@ public final FieldRangeQueryContext fieldRangeQuery() throws RecognitionExceptio
}
@SuppressWarnings("CheckReturnValue")
- public static class FieldTermQueryContext extends ParserRuleContext {
- public TermQueryValueContext termQueryValue() {
- return getRuleContext(TermQueryValueContext.class,0);
- }
+ public static class RangeQueryContext extends ParserRuleContext {
+ public Token operator;
public FieldNameContext fieldName() {
return getRuleContext(FieldNameContext.class,0);
}
- public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); }
- public FieldTermQueryContext(ParserRuleContext parent, int invokingState) {
+ public RangeQueryValueContext rangeQueryValue() {
+ return getRuleContext(RangeQueryValueContext.class,0);
+ }
+ public TerminalNode OP_LESS() { return getToken(KqlBaseParser.OP_LESS, 0); }
+ public TerminalNode OP_LESS_EQ() { return getToken(KqlBaseParser.OP_LESS_EQ, 0); }
+ public TerminalNode OP_MORE() { return getToken(KqlBaseParser.OP_MORE, 0); }
+ public TerminalNode OP_MORE_EQ() { return getToken(KqlBaseParser.OP_MORE_EQ, 0); }
+ public RangeQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_fieldTermQuery; }
+ @Override public int getRuleIndex() { return RULE_rangeQuery; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldTermQuery(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldTermQuery(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldTermQuery(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitRangeQuery(this);
else return visitor.visitChildren(this);
}
}
- public final FieldTermQueryContext fieldTermQuery() throws RecognitionException {
- FieldTermQueryContext _localctx = new FieldTermQueryContext(_ctx, getState());
- enterRule(_localctx, 14, RULE_fieldTermQuery);
+ public final RangeQueryContext rangeQuery() throws RecognitionException {
+ RangeQueryContext _localctx = new RangeQueryContext(_ctx, getState());
+ enterRule(_localctx, 12, RULE_rangeQuery);
+ int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(75);
- _errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) {
- case 1:
- {
- setState(72);
- fieldName();
- setState(73);
- match(COLON);
- }
- break;
+ setState(70);
+ fieldName();
+ setState(71);
+ ((RangeQueryContext)_localctx).operator = _input.LT(1);
+ _la = _input.LA(1);
+ if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 960L) != 0)) ) {
+ ((RangeQueryContext)_localctx).operator = (Token)_errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
}
- setState(77);
- termQueryValue();
+ setState(72);
+ rangeQueryValue();
}
}
catch (RecognitionException re) {
@@ -696,61 +681,83 @@ public final FieldTermQueryContext fieldTermQuery() throws RecognitionException
}
@SuppressWarnings("CheckReturnValue")
- public static class FieldNameContext extends ParserRuleContext {
- public WildcardExpressionContext wildcardExpression() {
- return getRuleContext(WildcardExpressionContext.class,0);
- }
- public UnquotedLiteralExpressionContext unquotedLiteralExpression() {
- return getRuleContext(UnquotedLiteralExpressionContext.class,0);
+ public static class RangeQueryValueContext extends ParserRuleContext {
+ public List<TerminalNode> UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); }
+ public TerminalNode UNQUOTED_LITERAL(int i) {
+ return getToken(KqlBaseParser.UNQUOTED_LITERAL, i);
}
- public QuotedStringExpressionContext quotedStringExpression() {
- return getRuleContext(QuotedStringExpressionContext.class,0);
+ public List<TerminalNode> WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); }
+ public TerminalNode WILDCARD(int i) {
+ return getToken(KqlBaseParser.WILDCARD, i);
}
- public FieldNameContext(ParserRuleContext parent, int invokingState) {
+ public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); }
+ public RangeQueryValueContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_fieldName; }
+ @Override public int getRuleIndex() { return RULE_rangeQueryValue; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldName(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQueryValue(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldName(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQueryValue(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldName(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitRangeQueryValue(this);
else return visitor.visitChildren(this);
}
}
- public final FieldNameContext fieldName() throws RecognitionException {
- FieldNameContext _localctx = new FieldNameContext(_ctx, getState());
- enterRule(_localctx, 16, RULE_fieldName);
+ public final RangeQueryValueContext rangeQueryValue() throws RecognitionException {
+ RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState());
+ enterRule(_localctx, 14, RULE_rangeQueryValue);
+ int _la;
try {
- setState(82);
+ int _alt;
+ setState(80);
_errHandler.sync(this);
switch (_input.LA(1)) {
+ case UNQUOTED_LITERAL:
case WILDCARD:
enterOuterAlt(_localctx, 1);
{
- setState(79);
- wildcardExpression();
- }
- break;
- case UNQUOTED_LITERAL:
- enterOuterAlt(_localctx, 2);
- {
- setState(80);
- unquotedLiteralExpression();
+ setState(75);
+ _errHandler.sync(this);
+ _alt = 1;
+ do {
+ switch (_alt) {
+ case 1:
+ {
+ {
+ setState(74);
+ _la = _input.LA(1);
+ if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) {
+ _errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
+ }
+ }
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ setState(77);
+ _errHandler.sync(this);
+ _alt = getInterpreter().adaptivePredict(_input,5,_ctx);
+ } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER );
}
break;
case QUOTED_STRING:
- enterOuterAlt(_localctx, 3);
+ enterOuterAlt(_localctx, 2);
{
- setState(81);
- quotedStringExpression();
+ setState(79);
+ match(QUOTED_STRING);
}
break;
default:
@@ -769,55 +776,43 @@ public final FieldNameContext fieldName() throws RecognitionException {
}
@SuppressWarnings("CheckReturnValue")
- public static class RangeQueryValueContext extends ParserRuleContext {
- public UnquotedLiteralExpressionContext unquotedLiteralExpression() {
- return getRuleContext(UnquotedLiteralExpressionContext.class,0);
- }
- public QuotedStringExpressionContext quotedStringExpression() {
- return getRuleContext(QuotedStringExpressionContext.class,0);
+ public static class ExistsQueryContext extends ParserRuleContext {
+ public FieldNameContext fieldName() {
+ return getRuleContext(FieldNameContext.class,0);
}
- public RangeQueryValueContext(ParserRuleContext parent, int invokingState) {
+ public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); }
+ public TerminalNode WILDCARD() { return getToken(KqlBaseParser.WILDCARD, 0); }
+ public ExistsQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_rangeQueryValue; }
+ @Override public int getRuleIndex() { return RULE_existsQuery; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterRangeQueryValue(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterExistsQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitRangeQueryValue(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitExistsQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitRangeQueryValue(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitExistsQuery(this);
else return visitor.visitChildren(this);
}
}
- public final RangeQueryValueContext rangeQueryValue() throws RecognitionException {
- RangeQueryValueContext _localctx = new RangeQueryValueContext(_ctx, getState());
- enterRule(_localctx, 18, RULE_rangeQueryValue);
+ public final ExistsQueryContext existsQuery() throws RecognitionException {
+ ExistsQueryContext _localctx = new ExistsQueryContext(_ctx, getState());
+ enterRule(_localctx, 16, RULE_existsQuery);
try {
- setState(86);
- _errHandler.sync(this);
- switch (_input.LA(1)) {
- case UNQUOTED_LITERAL:
- enterOuterAlt(_localctx, 1);
- {
- setState(84);
- unquotedLiteralExpression();
- }
- break;
- case QUOTED_STRING:
- enterOuterAlt(_localctx, 2);
- {
- setState(85);
- quotedStringExpression();
- }
- break;
- default:
- throw new NoViableAltException(this);
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(82);
+ fieldName();
+ setState(83);
+ match(COLON);
+ setState(84);
+ match(WILDCARD);
}
}
catch (RecognitionException re) {
@@ -832,76 +827,68 @@ public final RangeQueryValueContext rangeQueryValue() throws RecognitionExceptio
}
@SuppressWarnings("CheckReturnValue")
- public static class TermQueryValueContext extends ParserRuleContext {
- public UnquotedLiteralExpressionContext termValue;
- public WildcardExpressionContext wildcardExpression() {
- return getRuleContext(WildcardExpressionContext.class,0);
- }
- public QuotedStringExpressionContext quotedStringExpression() {
- return getRuleContext(QuotedStringExpressionContext.class,0);
- }
- public UnquotedLiteralExpressionContext unquotedLiteralExpression() {
- return getRuleContext(UnquotedLiteralExpressionContext.class,0);
+ public static class FieldQueryContext extends ParserRuleContext {
+ public FieldNameContext fieldName() {
+ return getRuleContext(FieldNameContext.class,0);
}
- public GroupingTermExpressionContext groupingTermExpression() {
- return getRuleContext(GroupingTermExpressionContext.class,0);
+ public TerminalNode COLON() { return getToken(KqlBaseParser.COLON, 0); }
+ public FieldQueryValueContext fieldQueryValue() {
+ return getRuleContext(FieldQueryValueContext.class,0);
}
- public TermQueryValueContext(ParserRuleContext parent, int invokingState) {
+ public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); }
+ public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); }
+ public FieldQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_termQueryValue; }
+ @Override public int getRuleIndex() { return RULE_fieldQuery; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterTermQueryValue(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitTermQueryValue(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitTermQueryValue(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldQuery(this);
else return visitor.visitChildren(this);
}
}
- public final TermQueryValueContext termQueryValue() throws RecognitionException {
- TermQueryValueContext _localctx = new TermQueryValueContext(_ctx, getState());
- enterRule(_localctx, 20, RULE_termQueryValue);
+ public final FieldQueryContext fieldQuery() throws RecognitionException {
+ FieldQueryContext _localctx = new FieldQueryContext(_ctx, getState());
+ enterRule(_localctx, 18, RULE_fieldQuery);
try {
- setState(92);
+ setState(96);
_errHandler.sync(this);
- switch (_input.LA(1)) {
- case WILDCARD:
+ switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) {
+ case 1:
enterOuterAlt(_localctx, 1);
{
+ setState(86);
+ fieldName();
+ setState(87);
+ match(COLON);
setState(88);
- wildcardExpression();
+ fieldQueryValue();
}
break;
- case QUOTED_STRING:
+ case 2:
enterOuterAlt(_localctx, 2);
{
- setState(89);
- quotedStringExpression();
- }
- break;
- case UNQUOTED_LITERAL:
- enterOuterAlt(_localctx, 3);
- {
setState(90);
- ((TermQueryValueContext)_localctx).termValue = unquotedLiteralExpression();
- }
- break;
- case LEFT_PARENTHESIS:
- enterOuterAlt(_localctx, 4);
- {
+ fieldName();
setState(91);
- groupingTermExpression();
+ match(COLON);
+ setState(92);
+ match(LEFT_PARENTHESIS);
+ setState(93);
+ fieldQueryValue();
+ setState(94);
+ match(RIGHT_PARENTHESIS);
}
break;
- default:
- throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
@@ -916,43 +903,63 @@ public final TermQueryValueContext termQueryValue() throws RecognitionException
}
@SuppressWarnings("CheckReturnValue")
- public static class GroupingTermExpressionContext extends ParserRuleContext {
- public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); }
- public UnquotedLiteralExpressionContext unquotedLiteralExpression() {
- return getRuleContext(UnquotedLiteralExpressionContext.class,0);
+ public static class FieldLessQueryContext extends ParserRuleContext {
+ public FieldQueryValueContext fieldQueryValue() {
+ return getRuleContext(FieldQueryValueContext.class,0);
}
+ public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); }
public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); }
- public GroupingTermExpressionContext(ParserRuleContext parent, int invokingState) {
+ public FieldLessQueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_groupingTermExpression; }
+ @Override public int getRuleIndex() { return RULE_fieldLessQuery; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterGroupingTermExpression(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldLessQuery(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitGroupingTermExpression(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldLessQuery(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitGroupingTermExpression(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldLessQuery(this);
else return visitor.visitChildren(this);
}
}
- public final GroupingTermExpressionContext groupingTermExpression() throws RecognitionException {
- GroupingTermExpressionContext _localctx = new GroupingTermExpressionContext(_ctx, getState());
- enterRule(_localctx, 22, RULE_groupingTermExpression);
+ public final FieldLessQueryContext fieldLessQuery() throws RecognitionException {
+ FieldLessQueryContext _localctx = new FieldLessQueryContext(_ctx, getState());
+ enterRule(_localctx, 20, RULE_fieldLessQuery);
try {
- enterOuterAlt(_localctx, 1);
- {
- setState(94);
- match(LEFT_PARENTHESIS);
- setState(95);
- unquotedLiteralExpression();
- setState(96);
- match(RIGHT_PARENTHESIS);
+ setState(103);
+ _errHandler.sync(this);
+ switch (_input.LA(1)) {
+ case AND:
+ case OR:
+ case NOT:
+ case UNQUOTED_LITERAL:
+ case QUOTED_STRING:
+ case WILDCARD:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(98);
+ fieldQueryValue();
+ }
+ break;
+ case LEFT_PARENTHESIS:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(99);
+ match(LEFT_PARENTHESIS);
+ setState(100);
+ fieldQueryValue();
+ setState(101);
+ match(RIGHT_PARENTHESIS);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
@@ -967,57 +974,171 @@ public final GroupingTermExpressionContext groupingTermExpression() throws Recog
}
@SuppressWarnings("CheckReturnValue")
- public static class UnquotedLiteralExpressionContext extends ParserRuleContext {
+ public static class FieldQueryValueContext extends ParserRuleContext {
+ public TerminalNode AND() { return getToken(KqlBaseParser.AND, 0); }
+ public TerminalNode OR() { return getToken(KqlBaseParser.OR, 0); }
public List<TerminalNode> UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); }
public TerminalNode UNQUOTED_LITERAL(int i) {
return getToken(KqlBaseParser.UNQUOTED_LITERAL, i);
}
- public UnquotedLiteralExpressionContext(ParserRuleContext parent, int invokingState) {
+ public List<TerminalNode> WILDCARD() { return getTokens(KqlBaseParser.WILDCARD); }
+ public TerminalNode WILDCARD(int i) {
+ return getToken(KqlBaseParser.WILDCARD, i);
+ }
+ public TerminalNode NOT() { return getToken(KqlBaseParser.NOT, 0); }
+ public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); }
+ public FieldQueryValueContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_unquotedLiteralExpression; }
+ @Override public int getRuleIndex() { return RULE_fieldQueryValue; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterUnquotedLiteralExpression(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldQueryValue(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitUnquotedLiteralExpression(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldQueryValue(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitUnquotedLiteralExpression(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldQueryValue(this);
else return visitor.visitChildren(this);
}
}
- public final UnquotedLiteralExpressionContext unquotedLiteralExpression() throws RecognitionException {
- UnquotedLiteralExpressionContext _localctx = new UnquotedLiteralExpressionContext(_ctx, getState());
- enterRule(_localctx, 24, RULE_unquotedLiteralExpression);
+ public final FieldQueryValueContext fieldQueryValue() throws RecognitionException {
+ FieldQueryValueContext _localctx = new FieldQueryValueContext(_ctx, getState());
+ enterRule(_localctx, 22, RULE_fieldQueryValue);
+ int _la;
try {
int _alt;
- enterOuterAlt(_localctx, 1);
- {
- setState(99);
+ setState(123);
_errHandler.sync(this);
- _alt = 1;
- do {
- switch (_alt) {
- case 1:
+ switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) {
+ case 1:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(106);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ if (_la==AND || _la==OR) {
{
+ setState(105);
+ _la = _input.LA(1);
+ if ( !(_la==AND || _la==OR) ) {
+ _errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
+ }
+ }
+ }
+
+ setState(109);
+ _errHandler.sync(this);
+ _alt = 1;
+ do {
+ switch (_alt) {
+ case 1:
+ {
+ {
+ setState(108);
+ _la = _input.LA(1);
+ if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) {
+ _errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
+ }
+ }
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ setState(111);
+ _errHandler.sync(this);
+ _alt = getInterpreter().adaptivePredict(_input,10,_ctx);
+ } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER );
+ }
+ break;
+ case 2:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(114);
+ _errHandler.sync(this);
+ _alt = 1;
+ do {
+ switch (_alt) {
+ case 1:
+ {
+ {
+ setState(113);
+ _la = _input.LA(1);
+ if ( !(_la==UNQUOTED_LITERAL || _la==WILDCARD) ) {
+ _errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
+ }
+ }
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ setState(116);
+ _errHandler.sync(this);
+ _alt = getInterpreter().adaptivePredict(_input,11,_ctx);
+ } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER );
+ setState(119);
+ _errHandler.sync(this);
+ switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) {
+ case 1:
{
- setState(98);
- match(UNQUOTED_LITERAL);
+ setState(118);
+ _la = _input.LA(1);
+ if ( !(_la==AND || _la==OR) ) {
+ _errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
}
}
break;
- default:
- throw new NoViableAltException(this);
}
- setState(101);
- _errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,9,_ctx);
- } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER );
+ }
+ break;
+ case 3:
+ enterOuterAlt(_localctx, 3);
+ {
+ setState(121);
+ _la = _input.LA(1);
+ if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 28L) != 0)) ) {
+ _errHandler.recoverInline(this);
+ }
+ else {
+ if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+ _errHandler.reportMatch(this);
+ consume();
+ }
+ }
+ break;
+ case 4:
+ enterOuterAlt(_localctx, 4);
+ {
+ setState(122);
+ match(QUOTED_STRING);
+ }
+ break;
}
}
catch (RecognitionException re) {
@@ -1032,78 +1153,76 @@ public final UnquotedLiteralExpressionContext unquotedLiteralExpression() throws
}
@SuppressWarnings("CheckReturnValue")
- public static class QuotedStringExpressionContext extends ParserRuleContext {
- public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); }
- public QuotedStringExpressionContext(ParserRuleContext parent, int invokingState) {
- super(parent, invokingState);
- }
- @Override public int getRuleIndex() { return RULE_quotedStringExpression; }
- @Override
- public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterQuotedStringExpression(this);
- }
- @Override
- public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitQuotedStringExpression(this);
- }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitQuotedStringExpression(this);
- else return visitor.visitChildren(this);
- }
- }
-
- public final QuotedStringExpressionContext quotedStringExpression() throws RecognitionException {
- QuotedStringExpressionContext _localctx = new QuotedStringExpressionContext(_ctx, getState());
- enterRule(_localctx, 26, RULE_quotedStringExpression);
- try {
- enterOuterAlt(_localctx, 1);
- {
- setState(103);
- match(QUOTED_STRING);
- }
- }
- catch (RecognitionException re) {
- _localctx.exception = re;
- _errHandler.reportError(this, re);
- _errHandler.recover(this, re);
- }
- finally {
- exitRule();
+ public static class FieldNameContext extends ParserRuleContext {
+ public Token value;
+ public List<TerminalNode> UNQUOTED_LITERAL() { return getTokens(KqlBaseParser.UNQUOTED_LITERAL); }
+ public TerminalNode UNQUOTED_LITERAL(int i) {
+ return getToken(KqlBaseParser.UNQUOTED_LITERAL, i);
}
- return _localctx;
- }
-
- @SuppressWarnings("CheckReturnValue")
- public static class WildcardExpressionContext extends ParserRuleContext {
+ public TerminalNode QUOTED_STRING() { return getToken(KqlBaseParser.QUOTED_STRING, 0); }
public TerminalNode WILDCARD() { return getToken(KqlBaseParser.WILDCARD, 0); }
- public WildcardExpressionContext(ParserRuleContext parent, int invokingState) {
+ public FieldNameContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
- @Override public int getRuleIndex() { return RULE_wildcardExpression; }
+ @Override public int getRuleIndex() { return RULE_fieldName; }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterWildcardExpression(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterFieldName(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitWildcardExpression(this);
+ if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitFieldName(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitWildcardExpression(this);
+ if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitFieldName(this);
else return visitor.visitChildren(this);
}
}
- public final WildcardExpressionContext wildcardExpression() throws RecognitionException {
- WildcardExpressionContext _localctx = new WildcardExpressionContext(_ctx, getState());
- enterRule(_localctx, 28, RULE_wildcardExpression);
+ public final FieldNameContext fieldName() throws RecognitionException {
+ FieldNameContext _localctx = new FieldNameContext(_ctx, getState());
+ enterRule(_localctx, 24, RULE_fieldName);
+ int _la;
try {
- enterOuterAlt(_localctx, 1);
- {
- setState(105);
- match(WILDCARD);
+ setState(132);
+ _errHandler.sync(this);
+ switch (_input.LA(1)) {
+ case UNQUOTED_LITERAL:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(126);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ do {
+ {
+ {
+ setState(125);
+ ((FieldNameContext)_localctx).value = match(UNQUOTED_LITERAL);
+ }
+ }
+ setState(128);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ } while ( _la==UNQUOTED_LITERAL );
+ }
+ break;
+ case QUOTED_STRING:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(130);
+ ((FieldNameContext)_localctx).value = match(QUOTED_STRING);
+ }
+ break;
+ case WILDCARD:
+ enterOuterAlt(_localctx, 3);
+ {
+ setState(131);
+ ((FieldNameContext)_localctx).value = match(WILDCARD);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
@@ -1133,65 +1252,86 @@ private boolean query_sempred(QueryContext _localctx, int predIndex) {
}
public static final String _serializedATN =
- "\u0004\u0001\rl\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+
- "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+
- "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+
- "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+
- "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0001\u0000\u0003\u0000"+
- " \b\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001"+
- "\u0001\u0001\u0003\u0001(\b\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+
- "\u0005\u0001-\b\u0001\n\u0001\f\u00010\t\u0001\u0001\u0002\u0001\u0002"+
- "\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001\u0003\u0003\u0003"+
- "9\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+
- "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006"+
- "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+
- "\u0003\u0007L\b\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b"+
- "\u0003\bS\b\b\u0001\t\u0001\t\u0003\tW\b\t\u0001\n\u0001\n\u0001\n\u0001"+
- "\n\u0003\n]\b\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+
- "\f\u0004\fd\b\f\u000b\f\f\fe\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+
- "\u000e\u0000\u0001\u0002\u000f\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+
- "\u0012\u0014\u0016\u0018\u001a\u001c\u0000\u0001\u0001\u0000\u0002\u0003"+
- "j\u0000\u001f\u0001\u0000\u0000\u0000\u0002\'\u0001\u0000\u0000\u0000"+
- "\u00044\u0001\u0000\u0000\u0000\u00068\u0001\u0000\u0000\u0000\b:\u0001"+
- "\u0000\u0000\u0000\n@\u0001\u0000\u0000\u0000\fD\u0001\u0000\u0000\u0000"+
- "\u000eK\u0001\u0000\u0000\u0000\u0010R\u0001\u0000\u0000\u0000\u0012V"+
- "\u0001\u0000\u0000\u0000\u0014\\\u0001\u0000\u0000\u0000\u0016^\u0001"+
- "\u0000\u0000\u0000\u0018c\u0001\u0000\u0000\u0000\u001ag\u0001\u0000\u0000"+
- "\u0000\u001ci\u0001\u0000\u0000\u0000\u001e \u0003\u0002\u0001\u0000\u001f"+
- "\u001e\u0001\u0000\u0000\u0000\u001f \u0001\u0000\u0000\u0000 !\u0001"+
- "\u0000\u0000\u0000!\"\u0005\u0000\u0000\u0001\"\u0001\u0001\u0000\u0000"+
- "\u0000#$\u0006\u0001\uffff\uffff\u0000$%\u0005\u0004\u0000\u0000%(\u0003"+
- "\u0004\u0002\u0000&(\u0003\u0004\u0002\u0000\'#\u0001\u0000\u0000\u0000"+
- "\'&\u0001\u0000\u0000\u0000(.\u0001\u0000\u0000\u0000)*\n\u0003\u0000"+
- "\u0000*+\u0007\u0000\u0000\u0000+-\u0003\u0002\u0001\u0004,)\u0001\u0000"+
- "\u0000\u0000-0\u0001\u0000\u0000\u0000.,\u0001\u0000\u0000\u0000./\u0001"+
- "\u0000\u0000\u0000/\u0003\u0001\u0000\u0000\u00000.\u0001\u0000\u0000"+
- "\u000015\u0003\b\u0004\u000025\u0003\u0006\u0003\u000035\u0003\n\u0005"+
- "\u000041\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u000043\u0001\u0000"+
- "\u0000\u00005\u0005\u0001\u0000\u0000\u000069\u0003\u000e\u0007\u0000"+
- "79\u0003\f\u0006\u000086\u0001\u0000\u0000\u000087\u0001\u0000\u0000\u0000"+
- "9\u0007\u0001\u0000\u0000\u0000:;\u0003\u0010\b\u0000;<\u0005\u0005\u0000"+
- "\u0000<=\u0005\t\u0000\u0000=>\u0003\u0002\u0001\u0000>?\u0005\n\u0000"+
- "\u0000?\t\u0001\u0000\u0000\u0000@A\u0005\u0007\u0000\u0000AB\u0003\u0002"+
- "\u0001\u0000BC\u0005\b\u0000\u0000C\u000b\u0001\u0000\u0000\u0000DE\u0003"+
- "\u0010\b\u0000EF\u0005\u0006\u0000\u0000FG\u0003\u0012\t\u0000G\r\u0001"+
- "\u0000\u0000\u0000HI\u0003\u0010\b\u0000IJ\u0005\u0005\u0000\u0000JL\u0001"+
- "\u0000\u0000\u0000KH\u0001\u0000\u0000\u0000KL\u0001\u0000\u0000\u0000"+
- "LM\u0001\u0000\u0000\u0000MN\u0003\u0014\n\u0000N\u000f\u0001\u0000\u0000"+
- "\u0000OS\u0003\u001c\u000e\u0000PS\u0003\u0018\f\u0000QS\u0003\u001a\r"+
- "\u0000RO\u0001\u0000\u0000\u0000RP\u0001\u0000\u0000\u0000RQ\u0001\u0000"+
- "\u0000\u0000S\u0011\u0001\u0000\u0000\u0000TW\u0003\u0018\f\u0000UW\u0003"+
- "\u001a\r\u0000VT\u0001\u0000\u0000\u0000VU\u0001\u0000\u0000\u0000W\u0013"+
- "\u0001\u0000\u0000\u0000X]\u0003\u001c\u000e\u0000Y]\u0003\u001a\r\u0000"+
- "Z]\u0003\u0018\f\u0000[]\u0003\u0016\u000b\u0000\\X\u0001\u0000\u0000"+
- "\u0000\\Y\u0001\u0000\u0000\u0000\\Z\u0001\u0000\u0000\u0000\\[\u0001"+
- "\u0000\u0000\u0000]\u0015\u0001\u0000\u0000\u0000^_\u0005\u0007\u0000"+
- "\u0000_`\u0003\u0018\f\u0000`a\u0005\b\u0000\u0000a\u0017\u0001\u0000"+
- "\u0000\u0000bd\u0005\u000b\u0000\u0000cb\u0001\u0000\u0000\u0000de\u0001"+
- "\u0000\u0000\u0000ec\u0001\u0000\u0000\u0000ef\u0001\u0000\u0000\u0000"+
- "f\u0019\u0001\u0000\u0000\u0000gh\u0005\f\u0000\u0000h\u001b\u0001\u0000"+
- "\u0000\u0000ij\u0005\r\u0000\u0000j\u001d\u0001\u0000\u0000\u0000\n\u001f"+
- "\'.48KRV\\e";
+ "\u0004\u0001\u0010\u0087\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+
+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+
+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+
+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+
+ "\u0002\f\u0007\f\u0001\u0000\u0003\u0000\u001c\b\u0000\u0001\u0000\u0001"+
+ "\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0003\u0001$\b"+
+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001)\b\u0001\n\u0001"+
+ "\f\u0001,\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+
+ "\u0002\u0001\u0002\u0001\u0002\u0003\u00025\b\u0002\u0001\u0003\u0001"+
+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+
+ "\u0004\u0003\u0004?\b\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+
+ "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+
+ "\u0006\u0001\u0007\u0004\u0007L\b\u0007\u000b\u0007\f\u0007M\u0001\u0007"+
+ "\u0003\u0007Q\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t"+
+ "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003"+
+ "\ta\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\nh\b\n\u0001\u000b"+
+ "\u0003\u000bk\b\u000b\u0001\u000b\u0004\u000bn\b\u000b\u000b\u000b\f\u000b"+
+ "o\u0001\u000b\u0004\u000bs\b\u000b\u000b\u000b\f\u000bt\u0001\u000b\u0003"+
+ "\u000bx\b\u000b\u0001\u000b\u0001\u000b\u0003\u000b|\b\u000b\u0001\f\u0004"+
+ "\f\u007f\b\f\u000b\f\f\f\u0080\u0001\f\u0001\f\u0003\f\u0085\b\f\u0001"+
+ "\f\u0000\u0001\u0002\r\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+
+ "\u0014\u0016\u0018\u0000\u0004\u0001\u0000\u0002\u0003\u0001\u0000\u0006"+
+ "\t\u0002\u0000\u000e\u000e\u0010\u0010\u0001\u0000\u0002\u0004\u0091\u0000"+
+ "\u001b\u0001\u0000\u0000\u0000\u0002#\u0001\u0000\u0000\u0000\u00044\u0001"+
+ "\u0000\u0000\u0000\u00066\u0001\u0000\u0000\u0000\b>\u0001\u0000\u0000"+
+ "\u0000\nB\u0001\u0000\u0000\u0000\fF\u0001\u0000\u0000\u0000\u000eP\u0001"+
+ "\u0000\u0000\u0000\u0010R\u0001\u0000\u0000\u0000\u0012`\u0001\u0000\u0000"+
+ "\u0000\u0014g\u0001\u0000\u0000\u0000\u0016{\u0001\u0000\u0000\u0000\u0018"+
+ "\u0084\u0001\u0000\u0000\u0000\u001a\u001c\u0003\u0002\u0001\u0000\u001b"+
+ "\u001a\u0001\u0000\u0000\u0000\u001b\u001c\u0001\u0000\u0000\u0000\u001c"+
+ "\u001d\u0001\u0000\u0000\u0000\u001d\u001e\u0005\u0000\u0000\u0001\u001e"+
+ "\u0001\u0001\u0000\u0000\u0000\u001f \u0006\u0001\uffff\uffff\u0000 !"+
+ "\u0005\u0004\u0000\u0000!$\u0003\u0004\u0002\u0000\"$\u0003\u0004\u0002"+
+ "\u0000#\u001f\u0001\u0000\u0000\u0000#\"\u0001\u0000\u0000\u0000$*\u0001"+
+ "\u0000\u0000\u0000%&\n\u0003\u0000\u0000&\'\u0007\u0000\u0000\u0000\'"+
+ ")\u0003\u0002\u0001\u0003(%\u0001\u0000\u0000\u0000),\u0001\u0000\u0000"+
+ "\u0000*(\u0001\u0000\u0000\u0000*+\u0001\u0000\u0000\u0000+\u0003\u0001"+
+ "\u0000\u0000\u0000,*\u0001\u0000\u0000\u0000-5\u0003\u0006\u0003\u0000"+
+ ".5\u0003\n\u0005\u0000/5\u0003\b\u0004\u000005\u0003\u0010\b\u000015\u0003"+
+ "\f\u0006\u000025\u0003\u0012\t\u000035\u0003\u0014\n\u00004-\u0001\u0000"+
+ "\u0000\u00004.\u0001\u0000\u0000\u00004/\u0001\u0000\u0000\u000040\u0001"+
+ "\u0000\u0000\u000041\u0001\u0000\u0000\u000042\u0001\u0000\u0000\u0000"+
+ "43\u0001\u0000\u0000\u00005\u0005\u0001\u0000\u0000\u000067\u0003\u0018"+
+ "\f\u000078\u0005\u0005\u0000\u000089\u0005\f\u0000\u00009:\u0003\u0002"+
+ "\u0001\u0000:;\u0005\r\u0000\u0000;\u0007\u0001\u0000\u0000\u0000<=\u0005"+
+ "\u0010\u0000\u0000=?\u0005\u0005\u0000\u0000><\u0001\u0000\u0000\u0000"+
+ ">?\u0001\u0000\u0000\u0000?@\u0001\u0000\u0000\u0000@A\u0005\u0010\u0000"+
+ "\u0000A\t\u0001\u0000\u0000\u0000BC\u0005\n\u0000\u0000CD\u0003\u0002"+
+ "\u0001\u0000DE\u0005\u000b\u0000\u0000E\u000b\u0001\u0000\u0000\u0000"+
+ "FG\u0003\u0018\f\u0000GH\u0007\u0001\u0000\u0000HI\u0003\u000e\u0007\u0000"+
+ "I\r\u0001\u0000\u0000\u0000JL\u0007\u0002\u0000\u0000KJ\u0001\u0000\u0000"+
+ "\u0000LM\u0001\u0000\u0000\u0000MK\u0001\u0000\u0000\u0000MN\u0001\u0000"+
+ "\u0000\u0000NQ\u0001\u0000\u0000\u0000OQ\u0005\u000f\u0000\u0000PK\u0001"+
+ "\u0000\u0000\u0000PO\u0001\u0000\u0000\u0000Q\u000f\u0001\u0000\u0000"+
+ "\u0000RS\u0003\u0018\f\u0000ST\u0005\u0005\u0000\u0000TU\u0005\u0010\u0000"+
+ "\u0000U\u0011\u0001\u0000\u0000\u0000VW\u0003\u0018\f\u0000WX\u0005\u0005"+
+ "\u0000\u0000XY\u0003\u0016\u000b\u0000Ya\u0001\u0000\u0000\u0000Z[\u0003"+
+ "\u0018\f\u0000[\\\u0005\u0005\u0000\u0000\\]\u0005\n\u0000\u0000]^\u0003"+
+ "\u0016\u000b\u0000^_\u0005\u000b\u0000\u0000_a\u0001\u0000\u0000\u0000"+
+ "`V\u0001\u0000\u0000\u0000`Z\u0001\u0000\u0000\u0000a\u0013\u0001\u0000"+
+ "\u0000\u0000bh\u0003\u0016\u000b\u0000cd\u0005\n\u0000\u0000de\u0003\u0016"+
+ "\u000b\u0000ef\u0005\u000b\u0000\u0000fh\u0001\u0000\u0000\u0000gb\u0001"+
+ "\u0000\u0000\u0000gc\u0001\u0000\u0000\u0000h\u0015\u0001\u0000\u0000"+
+ "\u0000ik\u0007\u0000\u0000\u0000ji\u0001\u0000\u0000\u0000jk\u0001\u0000"+
+ "\u0000\u0000km\u0001\u0000\u0000\u0000ln\u0007\u0002\u0000\u0000ml\u0001"+
+ "\u0000\u0000\u0000no\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000"+
+ "op\u0001\u0000\u0000\u0000p|\u0001\u0000\u0000\u0000qs\u0007\u0002\u0000"+
+ "\u0000rq\u0001\u0000\u0000\u0000st\u0001\u0000\u0000\u0000tr\u0001\u0000"+
+ "\u0000\u0000tu\u0001\u0000\u0000\u0000uw\u0001\u0000\u0000\u0000vx\u0007"+
+ "\u0000\u0000\u0000wv\u0001\u0000\u0000\u0000wx\u0001\u0000\u0000\u0000"+
+ "x|\u0001\u0000\u0000\u0000y|\u0007\u0003\u0000\u0000z|\u0005\u000f\u0000"+
+ "\u0000{j\u0001\u0000\u0000\u0000{r\u0001\u0000\u0000\u0000{y\u0001\u0000"+
+ "\u0000\u0000{z\u0001\u0000\u0000\u0000|\u0017\u0001\u0000\u0000\u0000"+
+ "}\u007f\u0005\u000e\u0000\u0000~}\u0001\u0000\u0000\u0000\u007f\u0080"+
+ "\u0001\u0000\u0000\u0000\u0080~\u0001\u0000\u0000\u0000\u0080\u0081\u0001"+
+ "\u0000\u0000\u0000\u0081\u0085\u0001\u0000\u0000\u0000\u0082\u0085\u0005"+
+ "\u000f\u0000\u0000\u0083\u0085\u0005\u0010\u0000\u0000\u0084~\u0001\u0000"+
+ "\u0000\u0000\u0084\u0082\u0001\u0000\u0000\u0000\u0084\u0083\u0001\u0000"+
+ "\u0000\u0000\u0085\u0019\u0001\u0000\u0000\u0000\u0010\u001b#*4>MP`gj"+
+ "otw{\u0080\u0084";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java
index 55fa21f0e899d..67253e4364190 100644
--- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseVisitor.java
@@ -51,12 +51,6 @@ interface KqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitSimpleQuery(KqlBaseParser.SimpleQueryContext ctx);
- /**
- * Visit a parse tree produced by {@link KqlBaseParser#expression}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitExpression(KqlBaseParser.ExpressionContext ctx);
/**
* Visit a parse tree produced by {@link KqlBaseParser#nestedQuery}.
* @param ctx the parse tree
@@ -64,29 +58,23 @@ interface KqlBaseVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitNestedQuery(KqlBaseParser.NestedQueryContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}.
- * @param ctx the parse tree
- * @return the visitor result
- */
- T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx);
- /**
- * Visit a parse tree produced by {@link KqlBaseParser#fieldRangeQuery}.
+ * Visit a parse tree produced by {@link KqlBaseParser#matchAllQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitFieldRangeQuery(KqlBaseParser.FieldRangeQueryContext ctx);
+ T visitMatchAllQuery(KqlBaseParser.MatchAllQueryContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#fieldTermQuery}.
+ * Visit a parse tree produced by {@link KqlBaseParser#parenthesizedQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitFieldTermQuery(KqlBaseParser.FieldTermQueryContext ctx);
+ T visitParenthesizedQuery(KqlBaseParser.ParenthesizedQueryContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#fieldName}.
+ * Visit a parse tree produced by {@link KqlBaseParser#rangeQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitFieldName(KqlBaseParser.FieldNameContext ctx);
+ T visitRangeQuery(KqlBaseParser.RangeQueryContext ctx);
/**
* Visit a parse tree produced by {@link KqlBaseParser#rangeQueryValue}.
* @param ctx the parse tree
@@ -94,33 +82,33 @@ interface KqlBaseVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitRangeQueryValue(KqlBaseParser.RangeQueryValueContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#termQueryValue}.
+ * Visit a parse tree produced by {@link KqlBaseParser#existsQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitTermQueryValue(KqlBaseParser.TermQueryValueContext ctx);
+ T visitExistsQuery(KqlBaseParser.ExistsQueryContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#groupingTermExpression}.
+ * Visit a parse tree produced by {@link KqlBaseParser#fieldQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitGroupingTermExpression(KqlBaseParser.GroupingTermExpressionContext ctx);
+ T visitFieldQuery(KqlBaseParser.FieldQueryContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#unquotedLiteralExpression}.
+ * Visit a parse tree produced by {@link KqlBaseParser#fieldLessQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitUnquotedLiteralExpression(KqlBaseParser.UnquotedLiteralExpressionContext ctx);
+ T visitFieldLessQuery(KqlBaseParser.FieldLessQueryContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#quotedStringExpression}.
+ * Visit a parse tree produced by {@link KqlBaseParser#fieldQueryValue}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitQuotedStringExpression(KqlBaseParser.QuotedStringExpressionContext ctx);
+ T visitFieldQueryValue(KqlBaseParser.FieldQueryValueContext ctx);
/**
- * Visit a parse tree produced by {@link KqlBaseParser#wildcardExpression}.
+ * Visit a parse tree produced by {@link KqlBaseParser#fieldName}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitWildcardExpression(KqlBaseParser.WildcardExpressionContext ctx);
+ T visitFieldName(KqlBaseParser.FieldNameContext ctx);
}
diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java
new file mode 100644
index 0000000000000..f996a953ea7f7
--- /dev/null
+++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/ParserUtils.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.kql.parser;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.antlr.v4.runtime.tree.ParseTreeVisitor;
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.apache.logging.log4j.util.Strings;
+import org.apache.lucene.queryparser.classic.QueryParser;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Utility class for parsing and processing KQL expressions.
+ * Provides methods for type-safe parsing, text extraction, and string escaping/unescaping.
+ */
+public final class ParserUtils {
+
+ private static final String UNQUOTED_LITERAL_TERM_DELIMITER = " ";
+ private static final char ESCAPE_CHAR = '\\';
+ private static final char QUOTE_CHAR = '"';
+ private static final char WILDCARD_CHAR = '*';
+
+ private ParserUtils() {
+ throw new UnsupportedOperationException("No need to instantiate this class");
+ }
+
+ /**
+ * Performs type-safe parsing using the provided visitor.
+ *
+ * @param visitor The visitor to use for parsing
+ * @param ctx The parse tree context to visit
+ * @param type The expected return type class
+ * @return The parsed result, cast to the expected type
+ */
+ @SuppressWarnings("unchecked")
+ public static <T> T typedParsing(ParseTreeVisitor<?> visitor, ParserRuleContext ctx, Class<T> type) {
+ Object result = ctx.accept(visitor);
+
+ if (type.isInstance(result)) {
+ return (T) result;
+ }
+
+ throw new KqlParsingException(
+ "Invalid query '{}'[{}] given; expected {} but found {}",
+ ctx.start.getLine(),
+ ctx.start.getCharPositionInLine(),
+ ctx.getText(),
+ ctx.getClass().getSimpleName(),
+ type.getSimpleName(),
+ (result != null ? result.getClass().getSimpleName() : "null")
+ );
+ }
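+
+ // Usage sketch (illustrative names, not part of this class): a visitor extending the
+ // generated KqlBaseBaseVisitor<QueryBuilder> could parse a rule type-safely with:
+ // QueryBuilder query = ParserUtils.typedParsing(astBuilder, ctx, QueryBuilder.class);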
+
+ /**
+ * Extracts text from a parse tree context by joining the text of all terminal nodes with a space delimiter.
+ *
+ * @param ctx The parse tree context
+ *
+ * @return The extracted text
+ */
+ public static String extractText(ParserRuleContext ctx) {
+ return String.join(UNQUOTED_LITERAL_TERM_DELIMITER, extractTextTokens(ctx));
+ }
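+
+ // For example, a context whose terminal nodes unescape to "foo" and "bar" yields "foo bar".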
+
+ /**
+ * Checks if the given context contains any unescaped wildcard characters.
+ *
+ * @param ctx The parse tree context to check
+ * @return true if wildcards are present, false otherwise
+ */
+ public static boolean hasWildcard(ParserRuleContext ctx) {
+ return ctx.children.stream().anyMatch(childNode -> {
+ if (childNode instanceof TerminalNode terminalNode) {
+ Token token = terminalNode.getSymbol();
+ return switch (token.getType()) {
+ case KqlBaseParser.WILDCARD -> true;
+ case KqlBaseParser.UNQUOTED_LITERAL -> token.getText().matches("[^\\\\]*[*].*");
+ default -> false;
+ };
+ }
+
+ return false;
+ });
+ }
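+
+ // For example, this is true for the literal f*oo but false for the escaped form f\*oo:
+ // the regex above only matches a '*' with no preceding backslash in the token.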
+
+ /**
+ * Escapes special characters in a query string for use in Lucene queries.
+ *
+ * @param queryText The query text to escape
+ * @param preserveWildcards If true, does not escape wildcard characters (*)
+ * @return The escaped query string
+ */
+ public static String escapeLuceneQueryString(String queryText, boolean preserveWildcards) {
+ if (preserveWildcards) {
+ StringBuilder escapedQuery = new StringBuilder(queryText.length());
+ StringBuilder subpart = new StringBuilder(queryText.length());
+
+ for (char currentChar : queryText.toCharArray()) {
+ if (currentChar == WILDCARD_CHAR) {
+ escapedQuery.append(QueryParser.escape(subpart.toString())).append(currentChar);
+ subpart.setLength(0);
+ } else {
+ subpart.append(currentChar);
+ }
+ }
+
+ return escapedQuery.append(QueryParser.escape(subpart.toString())).toString();
+ }
+
+ return QueryParser.escape(queryText);
+ }
+
+ private static List<String> extractTextTokens(ParserRuleContext ctx) {
+ assert ctx.children != null;
+ List<String> textTokens = new ArrayList<>(ctx.children.size());
+
+ for (ParseTree currentNode : ctx.children) {
+ if (currentNode instanceof TerminalNode terminalNode) {
+ textTokens.add(extractText(terminalNode));
+ } else {
+ throw new KqlParsingException("Unable to extract text from ctx", ctx.start.getLine(), ctx.start.getCharPositionInLine());
+ }
+ }
+
+ return textTokens;
+ }
+
+ private static String extractText(TerminalNode node) {
+ if (node.getSymbol().getType() == KqlBaseParser.QUOTED_STRING) {
+ return unescapeQuotedString(node);
+ } else if (node.getSymbol().getType() == KqlBaseParser.UNQUOTED_LITERAL) {
+ return unescapeUnquotedLiteral(node);
+ }
+
+ return node.getText();
+ }
+
+ private static String unescapeQuotedString(TerminalNode ctx) {
+ String inputText = ctx.getText();
+
+ assert inputText.length() >= 2 && inputText.charAt(0) == QUOTE_CHAR && inputText.charAt(inputText.length() - 1) == QUOTE_CHAR;
+ StringBuilder sb = new StringBuilder();
+
+ for (int i = 1; i < inputText.length() - 1;) {
+ char currentChar = inputText.charAt(i++);
+ if (currentChar == ESCAPE_CHAR && i + 1 < inputText.length()) {
+ currentChar = inputText.charAt(i++);
+ switch (currentChar) {
+ case 't' -> sb.append('\t');
+ case 'n' -> sb.append('\n');
+ case 'r' -> sb.append('\r');
+ case 'u' -> i = handleUnicodeSequence(ctx, sb, inputText, i);
+ case QUOTE_CHAR -> sb.append('\"');
+ case ESCAPE_CHAR -> sb.append(ESCAPE_CHAR);
+ default -> sb.append(ESCAPE_CHAR).append(currentChar);
+ }
+ } else {
+ sb.append(currentChar);
+ }
+ }
+
+ return sb.toString();
+ }
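+
+ // For example, the quoted token "foo\"bar" (surrounding quotes included) unescapes to
+ // foo"bar, while \t, \n, \r and \uXXXX sequences are decoded to their character values.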
+
+ private static String unescapeUnquotedLiteral(TerminalNode ctx) {
+ String inputText = ctx.getText();
+
+ if (inputText == null || inputText.isEmpty()) {
+ return inputText;
+ }
+ StringBuilder sb = new StringBuilder(inputText.length());
+
+ for (int i = 0; i < inputText.length();) {
+ char currentChar = inputText.charAt(i++);
+ if (currentChar == ESCAPE_CHAR && i < inputText.length()) {
+ if (isEscapedKeywordSequence(inputText, i)) {
+ String sequence = handleKeywordSequence(inputText, i);
+ sb.append(sequence);
+ i += sequence.length();
+ } else {
+ currentChar = inputText.charAt(i++);
+ switch (currentChar) {
+ case 't' -> sb.append('\t');
+ case 'n' -> sb.append('\n');
+ case 'r' -> sb.append('\r');
+ case 'u' -> i = handleUnicodeSequence(ctx, sb, inputText, i);
+ case QUOTE_CHAR -> sb.append('\"');
+ case ESCAPE_CHAR -> sb.append(ESCAPE_CHAR);
+ case '(', ')', ':', '<', '>', '*', '{', '}' -> sb.append(currentChar);
+ default -> sb.append(ESCAPE_CHAR).append(currentChar);
+ }
+ }
+ } else {
+ sb.append(currentChar);
+ }
+ }
+
+ return sb.toString();
+ }
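+
+ // For example, \and, \or and \not unescape to the bare keywords (and, or, not), and
+ // escaped special characters such as \( or \: unescape to the character itself.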
+
+ private static boolean isEscapedKeywordSequence(String input, int startIndex) {
+ if (startIndex + 1 >= input.length()) {
+ return false;
+ }
+ String remaining = Strings.toRootLowerCase(input.substring(startIndex));
+ return remaining.startsWith("and") || remaining.startsWith("or") || remaining.startsWith("not");
+ }
+
+ private static String handleKeywordSequence(String input, int startIndex) {
+ String remaining = input.substring(startIndex);
+ if (Strings.toRootLowerCase(remaining).startsWith("and")) return remaining.substring(0, 3);
+ if (Strings.toRootLowerCase(remaining).startsWith("or")) return remaining.substring(0, 2);
+ if (Strings.toRootLowerCase(remaining).startsWith("not")) return remaining.substring(0, 3);
+ return "";
+ }
+
+ private static int handleUnicodeSequence(TerminalNode ctx, StringBuilder sb, String text, int startIdx) {
+ int endIdx = startIdx + 4;
+ String hex = text.substring(startIdx, endIdx);
+
+ try {
+ int code = Integer.parseInt(hex, 16);
+
+ if (code >= 0xD800 && code <= 0xDFFF) {
+ // U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes.
+ throw new KqlParsingException(
+ "Invalid unicode character code, [{}] is a surrogate code",
+ ctx.getSymbol().getLine(),
+ ctx.getSymbol().getCharPositionInLine() + startIdx,
+ hex
+ );
+ }
+ sb.append(String.valueOf(Character.toChars(code)));
+ } catch (IllegalArgumentException e) {
+ throw new KqlParsingException(
+ "Invalid unicode character code [{}]",
+ ctx.getSymbol().getLine(),
+ ctx.getSymbol().getCharPositionInLine() + startIdx,
+ hex
+ );
+ }
+
+ return endIdx;
+ }
+}
diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java
new file mode 100644
index 0000000000000..05474bcedd4c8
--- /dev/null
+++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/parser/ParserUtilsTests.java
@@ -0,0 +1,280 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.kql.parser;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.antlr.v4.runtime.tree.TerminalNodeImpl;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Stream;
+
+import static org.elasticsearch.core.Strings.format;
+import static org.elasticsearch.xpack.kql.parser.KqlBaseParser.QUOTED_STRING;
+import static org.elasticsearch.xpack.kql.parser.KqlBaseParser.UNQUOTED_LITERAL;
+import static org.elasticsearch.xpack.kql.parser.KqlBaseParser.WILDCARD;
+import static org.elasticsearch.xpack.kql.parser.ParserUtils.escapeLuceneQueryString;
+import static org.elasticsearch.xpack.kql.parser.ParserUtils.extractText;
+import static org.elasticsearch.xpack.kql.parser.ParserUtils.hasWildcard;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class ParserUtilsTests extends ESTestCase {
+
+ public void testExtractTextWithQuotedString() {
+ // General case
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo"))), equalTo("foo"));
+
+ // Empty string
+ assertThat(extractText(parserRuleContext(quotedStringNode(""))), equalTo(""));
+
+ // Whitespaces are preserved
+ assertThat(extractText(parserRuleContext(quotedStringNode(" foo bar "))), equalTo(" foo bar "));
+
+ // Quoted string does not need escaping for KQL keywords (and, or, ...)
+ assertThat(extractText(parserRuleContext(quotedStringNode("not foo and bar or baz"))), equalTo("not foo and bar or baz"));
+
+ // Quoted string does not need escaping for KQL special chars (e.g: '{', ':', ...)
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo*:'\u3000{(})"))), equalTo("foo*:'\u3000{(})"));
+
+ // Escaped characters handling
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\\\"))), equalTo("\\"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\\bar"))), equalTo("foo\\bar"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\\"))), equalTo("foo\\"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\\\foo"))), equalTo("\\foo"));
+
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\\""))), equalTo("\""));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\"bar"))), equalTo("foo\"bar"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\\""))), equalTo("foo\""));
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\\"foo"))), equalTo("\"foo"));
+
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\t"))), equalTo("\t"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\tbar"))), equalTo("foo\tbar"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\t"))), equalTo("foo\t"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\tfoo"))), equalTo("\tfoo"));
+
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\n"))), equalTo("\n"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\nbar"))), equalTo("foo\nbar"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\n"))), equalTo("foo\n"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\nfoo"))), equalTo("\nfoo"));
+
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\r"))), equalTo("\r"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\rbar"))), equalTo("foo\rbar"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("foo\\r"))), equalTo("foo\r"));
+ assertThat(extractText(parserRuleContext(quotedStringNode("\\rfoo"))), equalTo("\rfoo"));
+
+ // Unicode characters handling (\u0041 is 'A')
+ assertThat(extractText(parserRuleContext(quotedStringNode(format("\\u0041")))), equalTo("A"));
+ assertThat(extractText(parserRuleContext(quotedStringNode(format("foo\\u0041bar")))), equalTo("fooAbar"));
+ assertThat(extractText(parserRuleContext(quotedStringNode(format("foo\\u0041")))), equalTo("fooA"));
+ assertThat(extractText(parserRuleContext(quotedStringNode(format("\\u0041foo")))), equalTo("Afoo"));
+ }
+
+ public void testExtractTextWithUnquotedLiteral() {
+ // General case
+ assertThat(extractText(parserRuleContext(literalNode("foo"))), equalTo("foo"));
+
+ // KQL keywords unescaping
+ assertThat(extractText(parserRuleContext(literalNode("\\not foo \\and bar \\or baz"))), equalTo("not foo and bar or baz"));
+ assertThat(
+ extractText(parserRuleContext(literalNode("\\\\not foo \\\\and bar \\\\or baz"))),
+ equalTo("\\not foo \\and bar \\or baz")
+ );
+
+ // Escaped characters handling
+ assertThat(extractText(parserRuleContext(literalNode("\\\\"))), equalTo("\\"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\\\bar"))), equalTo("foo\\bar"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\\\"))), equalTo("foo\\"));
+ assertThat(extractText(parserRuleContext(literalNode("\\\\foo"))), equalTo("\\foo"));
+
+ assertThat(extractText(parserRuleContext(literalNode("\\\""))), equalTo("\""));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\\"bar"))), equalTo("foo\"bar"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\\""))), equalTo("foo\""));
+ assertThat(extractText(parserRuleContext(literalNode("\\\"foo"))), equalTo("\"foo"));
+
+ assertThat(extractText(parserRuleContext(literalNode("\\t"))), equalTo("\t"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\tbar"))), equalTo("foo\tbar"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\t"))), equalTo("foo\t"));
+ assertThat(extractText(parserRuleContext(literalNode("\\tfoo"))), equalTo("\tfoo"));
+
+ assertThat(extractText(parserRuleContext(literalNode("\\n"))), equalTo("\n"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\nbar"))), equalTo("foo\nbar"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\n"))), equalTo("foo\n"));
+ assertThat(extractText(parserRuleContext(literalNode("\\nfoo"))), equalTo("\nfoo"));
+
+ assertThat(extractText(parserRuleContext(literalNode("\\r"))), equalTo("\r"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\rbar"))), equalTo("foo\rbar"));
+ assertThat(extractText(parserRuleContext(literalNode("foo\\r"))), equalTo("foo\r"));
+ assertThat(extractText(parserRuleContext(literalNode("\\rfoo"))), equalTo("\rfoo"));
+
+ for (String escapedChar : List.of("(", ")", ":", "<", ">", "*", "{", "}")) {
+ assertThat(extractText(parserRuleContext(literalNode(format("\\%s", escapedChar)))), equalTo(escapedChar));
+ assertThat(
+ extractText(parserRuleContext(literalNode(format("foo\\%sbar", escapedChar)))),
+ equalTo(format("foo%sbar", escapedChar))
+ );
+ assertThat(extractText(parserRuleContext(literalNode(format("foo\\%s", escapedChar)))), equalTo(format("foo%s", escapedChar)));
+ assertThat(extractText(parserRuleContext(literalNode(format("\\%sfoo", escapedChar)))), equalTo(format("%sfoo", escapedChar)));
+ }
+
+ // Unicode characters handling (\u0041 is 'A')
+ assertThat(extractText(parserRuleContext(literalNode(format("\\u0041")))), equalTo("A"));
+ assertThat(extractText(parserRuleContext(literalNode(format("foo\\u0041bar")))), equalTo("fooAbar"));
+ assertThat(extractText(parserRuleContext(literalNode(format("foo\\u0041")))), equalTo("fooA"));
+ assertThat(extractText(parserRuleContext(literalNode(format("\\u0041foo")))), equalTo("Afoo"));
+ }
+
+ public void testHasWildcard() {
+ // No children
+ assertFalse(hasWildcard(parserRuleContext(List.of())));
+
+ // Lone wildcard
+ assertTrue(hasWildcard(parserRuleContext(wildcardNode())));
+ assertTrue(hasWildcard(parserRuleContext(randomTextNodeListWithNode(wildcardNode()))));
+
+ // All children are literals
+ assertFalse(hasWildcard(parserRuleContext(randomList(1, randomIntBetween(1, 100), ParserUtilsTests::randomLiteralNode))));
+
+ // Quoted string
+ assertFalse(hasWildcard(parserRuleContext(randomQuotedStringNode())));
+
+ // Literal node containing the wildcard character
+ assertTrue(hasWildcard(parserRuleContext(literalNode("f*oo"))));
+ assertTrue(hasWildcard(parserRuleContext(literalNode("*foo"))));
+ assertTrue(hasWildcard(parserRuleContext(literalNode("foo*"))));
+
+ // Literal node containing the wildcard characters (escaped)
+ assertFalse(hasWildcard(parserRuleContext(literalNode("f\\*oo"))));
+ assertFalse(hasWildcard(parserRuleContext(literalNode("\\*foo"))));
+ assertFalse(hasWildcard(parserRuleContext(literalNode("foo\\*"))));
+ }
+
+ public void testUnquotedLiteralInvalidUnicodeCodeParsing() {
+ {
+ // Invalid unicode digit (G)
+ ParserRuleContext ctx = parserRuleContext(literalNode("\\u0G41"));
+ KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx));
+ assertThat(e.getMessage(), equalTo("line 0:3: Invalid unicode character code [0G41]"));
+ }
+
+ {
+ // U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes.
+ ParserRuleContext ctx = parserRuleContext(literalNode("\\uD900"));
+ KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx));
+ assertThat(e.getMessage(), equalTo("line 0:3: Invalid unicode character code, [D900] is a surrogate code"));
+ }
+ }
+
+ public void testQuotedStringInvalidUnicodeCodeParsing() {
+ {
+ // Invalid unicode digit (G)
+ ParserRuleContext ctx = parserRuleContext(quotedStringNode("\\u0G41"));
+ KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx));
+ assertThat(e.getMessage(), equalTo("line 0:4: Invalid unicode character code [0G41]"));
+ }
+
+ {
+ // U+D800—U+DFFF can only be used as surrogate pairs and are not valid character codes.
+ ParserRuleContext ctx = parserRuleContext(quotedStringNode("\\uD900"));
+ KqlParsingException e = assertThrows(KqlParsingException.class, () -> extractText(ctx));
+ assertThat(e.getMessage(), equalTo("line 0:4: Invalid unicode character code, [D900] is a surrogate code"));
+ }
+ }
+
+ public void testEscapeLuceneQueryString() {
+ // Quotes
+ assertThat(escapeLuceneQueryString("\"The Pink Panther\"", randomBoolean()), equalTo("\\\"The Pink Panther\\\""));
+
+ // Escape chars
+ assertThat(escapeLuceneQueryString("The Pink \\ Panther", randomBoolean()), equalTo("The Pink \\\\ Panther"));
+
+ // Field operations
+ assertThat(escapeLuceneQueryString("title:Do it right", randomBoolean()), equalTo("title\\:Do it right"));
+ assertThat(escapeLuceneQueryString("title:(pink panther)", randomBoolean()), equalTo("title\\:\\(pink panther\\)"));
+ assertThat(escapeLuceneQueryString("title:-pink", randomBoolean()), equalTo("title\\:\\-pink"));
+ assertThat(escapeLuceneQueryString("title:+pink", randomBoolean()), equalTo("title\\:\\+pink"));
+ assertThat(escapeLuceneQueryString("title:pink~", randomBoolean()), equalTo("title\\:pink\\~"));
+ assertThat(escapeLuceneQueryString("title:pink~3.5", randomBoolean()), equalTo("title\\:pink\\~3.5"));
+ assertThat(escapeLuceneQueryString("title:pink panther^4", randomBoolean()), equalTo("title\\:pink panther\\^4"));
+ assertThat(escapeLuceneQueryString("rating:[0 TO 5]", randomBoolean()), equalTo("rating\\:\\[0 TO 5\\]"));
+ assertThat(escapeLuceneQueryString("rating:{0 TO 5}", randomBoolean()), equalTo("rating\\:\\{0 TO 5\\}"));
+
+ // Boolean operators
+ assertThat(escapeLuceneQueryString("foo || bar", randomBoolean()), equalTo("foo \\|\\| bar"));
+ assertThat(escapeLuceneQueryString("foo && bar", randomBoolean()), equalTo("foo \\&\\& bar"));
+ assertThat(escapeLuceneQueryString("!foo", randomBoolean()), equalTo("\\!foo"));
+
+ // Wildcards:
+ assertThat(escapeLuceneQueryString("te?t", randomBoolean()), equalTo("te\\?t"));
+ assertThat(escapeLuceneQueryString("foo*", true), equalTo("foo*"));
+ assertThat(escapeLuceneQueryString("*foo", true), equalTo("*foo"));
+ assertThat(escapeLuceneQueryString("foo * bar", true), equalTo("foo * bar"));
+ assertThat(escapeLuceneQueryString("foo*", false), equalTo("foo\\*"));
+ }
+
+ private static ParserRuleContext parserRuleContext(ParseTree child) {
+ return parserRuleContext(List.of(child));
+ }
+
+ private static ParserRuleContext parserRuleContext(List<ParseTree> children) {
+ ParserRuleContext ctx = new ParserRuleContext(null, randomInt());
+ ctx.children = children;
+ return ctx;
+ }
+
+ private static TerminalNode terminalNode(int type, String text) {
+ Token symbol = mock(Token.class);
+ when(symbol.getType()).thenReturn(type);
+ when(symbol.getText()).thenReturn(text);
+ when(symbol.getLine()).thenReturn(0);
+ when(symbol.getCharPositionInLine()).thenReturn(0);
+ return new TerminalNodeImpl(symbol);
+ }
+
+ private static List<ParseTree> randomTextNodeListWithNode(TerminalNode node) {
+ List<ParseTree> nodes = new ArrayList<>(
+ Stream.concat(Stream.generate(ParserUtilsTests::randomTextNode).limit(100), Stream.of(node)).toList()
+ );
+ Collections.shuffle(nodes, random());
+ return nodes;
+ }
+
+ private static TerminalNode randomTextNode() {
+ return switch (randomInt() % 3) {
+ case 0 -> wildcardNode();
+ case 1 -> randomQuotedStringNode();
+ default -> randomLiteralNode();
+ };
+ }
+
+ private static TerminalNode quotedStringNode(String quotedStringText) {
+ return terminalNode(QUOTED_STRING, "\"" + quotedStringText + "\"");
+ }
+
+ private static TerminalNode randomQuotedStringNode() {
+ return quotedStringNode(randomIdentifier());
+ }
+
+ private static TerminalNode literalNode(String literalText) {
+ return terminalNode(UNQUOTED_LITERAL, literalText);
+ }
+
+ private static TerminalNode randomLiteralNode() {
+ return terminalNode(UNQUOTED_LITERAL, randomIdentifier());
+ }
+
+ private static TerminalNode wildcardNode() {
+ return terminalNode(WILDCARD, "*");
+ }
+}
diff --git a/x-pack/plugin/kql/src/test/resources/supported-queries b/x-pack/plugin/kql/src/test/resources/supported-queries
index d750f16149112..d9378cf9041c2 100644
--- a/x-pack/plugin/kql/src/test/resources/supported-queries
+++ b/x-pack/plugin/kql/src/test/resources/supported-queries
@@ -68,6 +68,15 @@ foo_field:foo AND (foo_field:foo bar OR foo bar)
foo_field:foo AND (foo_field:foo bar OR foo bar)
foo_field:foo OR (foo_field:foo bar OR foo bar)
+foo:AND
+foo:OR
+foo:NOT
+foo AND
+foo OR
+AND foo
+OR foo
+NOT
+
// Nested queries
nested_field: { NOT foo }
nested_field: { NOT foo bar }
diff --git a/x-pack/plugin/kql/src/test/resources/unsupported-queries b/x-pack/plugin/kql/src/test/resources/unsupported-queries
index 545b03576b331..97a26f16db141 100644
--- a/x-pack/plugin/kql/src/test/resources/unsupported-queries
+++ b/x-pack/plugin/kql/src/test/resources/unsupported-queries
@@ -16,14 +16,6 @@ NOT (foo_field:foo AND) foo_field:foo bar
foo_field: "foo bar
foo_field: foo bar"
-
-// Invalid boolean queries
-foo AND
-AND foo
-foo OR
-OR foo
-NOT foo:
-
// Can't nest grouping terms parentheses
foo_field:(foo (bar))
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseDowngradeIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseDowngradeIT.java
new file mode 100644
index 0000000000000..f004189098c43
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseDowngradeIT.java
@@ -0,0 +1,489 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb;
+
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+
+import java.io.IOException;
+import java.util.List;
+
+public class DataStreamLicenseDowngradeIT extends DataStreamLicenseChangeIT {
+ @Override
+ protected void applyInitialLicense() throws IOException {
+ startTrial();
+ }
+
+ @Override
+ protected void licenseChange() throws IOException {
+ startBasic();
+ }
+
+ @Override
+ protected List<TestCase> cases() {
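+ // Covers logsdb, time_series and standard data streams, with default, explicitly
+ // synthetic and explicitly stored source mode overrides.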
+ return List.of(new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "logs-test-regular";
+ }
+
+ @Override
+ public String indexMode() {
+ return "logsdb";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }, new TestCase() {
+ private static final String sourceModeOverride = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "SYNTHETIC"
+ }
+ }
+ }
+ }""";
+
+ @Override
+ public String dataStreamName() {
+ return "logs-test-explicit-synthetic";
+ }
+
+ @Override
+ public String indexMode() {
+ return "logsdb";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ assertOK(createDataStream(client(), dataStreamName()));
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ rolloverDataStream(client(), dataStreamName());
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }, new TestCase() {
+ private static final String sourceModeOverride = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "STORED"
+ }
+ }
+ }
+ }""";
+
+ @Override
+ public String dataStreamName() {
+ return "logs-test-explicit-stored";
+ }
+
+ @Override
+ public String indexMode() {
+ return "logsdb";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ assertOK(createDataStream(client(), dataStreamName()));
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ rolloverDataStream(client(), dataStreamName());
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }, new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "tsdb-test-regular";
+ }
+
+ @Override
+ public String indexMode() {
+ return "time_series";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mode": "time_series",
+ "routing_path": ["dim"]
+ }
+ },
+ "mappings": {
+ "properties": {
+ "dim": {
+ "type": "keyword",
+ "time_series_dimension": true
+ }
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "tsdb-test-regular-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["tsdb-test-regular"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["tsdb-test-regular-component"]
+ }
+ """;
+
+ putTemplate(client(), "tsdb-test-regular-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }, new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "tsdb-test-synthetic";
+ }
+
+ @Override
+ public String indexMode() {
+ return "time_series";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mode": "time_series",
+ "routing_path": ["dim"],
+ "mapping.source.mode": "SYNTHETIC"
+ }
+ },
+ "mappings": {
+ "properties": {
+ "dim": {
+ "type": "keyword",
+ "time_series_dimension": true
+ }
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "tsdb-test-synthetic-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["tsdb-test-synthetic"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["tsdb-test-synthetic-component"]
+ }
+ """;
+
+ putTemplate(client(), "tsdb-test-synthetic-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }, new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "tsdb-test-stored";
+ }
+
+ @Override
+ public String indexMode() {
+ return "time_series";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mode": "time_series",
+ "routing_path": ["dim"],
+ "mapping.source.mode": "STORED"
+ }
+ },
+ "mappings": {
+ "properties": {
+ "dim": {
+ "type": "keyword",
+ "time_series_dimension": true
+ }
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "tsdb-test-stored-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["tsdb-test-stored"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["tsdb-test-stored-component"]
+ }
+ """;
+
+ putTemplate(client(), "tsdb-test-stored-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ },
+
+ new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "standard";
+ }
+
+ @Override
+ public String indexMode() {
+ return "standard";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var template = """
+ {
+ "index_patterns": ["standard"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": []
+ }
+ """;
+
+ putTemplate(client(), "standard-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ },
+ new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "standard-synthetic";
+ }
+
+ @Override
+ public String indexMode() {
+ return "standard";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "SYNTHETIC"
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "standard-synthetic-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["standard-synthetic"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["standard-synthetic-component"]
+ }
+ """;
+
+ putTemplate(client(), "standard-synthetic-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ },
+ new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "standard-stored";
+ }
+
+ @Override
+ public String indexMode() {
+ return "standard";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "STORED"
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "standard-stored-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["standard-stored"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["standard-stored-component"]
+ }
+ """;
+
+ putTemplate(client(), "standard-stored-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }
+ );
+ }
+}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java
new file mode 100644
index 0000000000000..b84c982766e4b
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseChangeIT.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.local.distribution.DistributionType;
+import org.junit.ClassRule;
+
+import java.io.IOException;
+import java.util.List;
+
+public abstract class DataStreamLicenseChangeIT extends LogsIndexModeRestTestIT {
+ @ClassRule
+ public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
+ .distribution(DistributionType.DEFAULT)
+ .module("data-streams")
+ .module("x-pack-stack")
+ .setting("cluster.logsdb.enabled", "true")
+ .setting("xpack.security.enabled", "false")
+ .setting("xpack.license.self_generated.type", "basic")
+ .build();
+
+ @Override
+ protected String getTestRestCluster() {
+ return cluster.getHttpAddresses();
+ }
+
+ protected interface TestCase {
+ String dataStreamName();
+
+ void prepareDataStream() throws IOException;
+
+ String indexMode();
+
+ SourceFieldMapper.Mode initialMode();
+
+ SourceFieldMapper.Mode finalMode();
+
+ void rollover() throws IOException;
+ }
+
+ protected abstract void licenseChange() throws IOException;
+
+ protected abstract void applyInitialLicense() throws IOException;
+
+ protected abstract List<TestCase> cases();
+
+ public void testLicenseChange() throws IOException {
+ applyInitialLicense();
+
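+ // Phase 1: create each data stream under the initial license and verify that its
+ // backing index has the expected index mode and initial source mode.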
+ for (var testCase : cases()) {
+ testCase.prepareDataStream();
+
+ var indexMode = (String) getSetting(client(), getDataStreamBackingIndex(client(), testCase.dataStreamName(), 0), "index.mode");
+ assertEquals(testCase.indexMode(), indexMode);
+
+ var sourceMode = (String) getSetting(
+ client(),
+ getDataStreamBackingIndex(client(), testCase.dataStreamName(), 0),
+ "index.mapping.source.mode"
+ );
+ assertEquals(testCase.initialMode().toString(), sourceMode);
+ }
+
+ licenseChange();
+
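+ // Phase 2: after the license change, roll each data stream over and verify that the
+ // new write index picks up the expected final source mode.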
+ for (var testCase : cases()) {
+ testCase.rollover();
+
+ var indexMode = (String) getSetting(client(), getDataStreamBackingIndex(client(), testCase.dataStreamName(), 1), "index.mode");
+ assertEquals(testCase.indexMode(), indexMode);
+
+ var sourceMode = (String) getSetting(
+ client(),
+ getDataStreamBackingIndex(client(), testCase.dataStreamName(), 1),
+ "index.mapping.source.mode"
+ );
+ assertEquals(testCase.finalMode().toString(), sourceMode);
+ }
+ }
+
+ protected static void startBasic() throws IOException {
+ Request startBasic = new Request("POST", "/_license/start_basic");
+ startBasic.addParameter("acknowledge", "true");
+ assertOK(client().performRequest(startBasic));
+ }
+
+ protected static void startTrial() throws IOException {
+ Request startTrial = new Request("POST", "/_license/start_trial");
+ startTrial.addParameter("acknowledge", "true");
+ assertOK(client().performRequest(startTrial));
+ }
+
+ protected static Response removeComponentTemplate(final RestClient client, final String componentTemplate) throws IOException {
+ final Request request = new Request("DELETE", "/_component_template/" + componentTemplate);
+ return client.performRequest(request);
+ }
+}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java
new file mode 100644
index 0000000000000..bce43ca046523
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/DataStreamLicenseUpgradeIT.java
@@ -0,0 +1,487 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb;
+
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+
+import java.io.IOException;
+import java.util.List;
+
+public class DataStreamLicenseUpgradeIT extends DataStreamLicenseChangeIT {
+ @Override
+ protected void applyInitialLicense() {}
+
+ @Override
+ protected void licenseChange() throws IOException {
+ startTrial();
+ }
+
+ @Override
+ protected List<TestCase> cases() {
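+ // Covers logsdb, time_series and standard data streams, with default, explicitly
+ // synthetic and explicitly stored source mode overrides.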
+ return List.of(new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "logs-test-regular";
+ }
+
+ @Override
+ public String indexMode() {
+ return "logsdb";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+ }, new TestCase() {
+ private static final String sourceModeOverride = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "SYNTHETIC"
+ }
+ }
+ }
+ }""";
+
+ @Override
+ public String dataStreamName() {
+ return "logs-test-explicit-synthetic";
+ }
+
+ @Override
+ public String indexMode() {
+ return "logsdb";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ assertOK(createDataStream(client(), dataStreamName()));
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ rolloverDataStream(client(), dataStreamName());
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+ }, new TestCase() {
+ private static final String sourceModeOverride = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "STORED"
+ }
+ }
+ }
+ }""";
+
+ @Override
+ public String dataStreamName() {
+ return "logs-test-explicit-stored";
+ }
+
+ @Override
+ public String indexMode() {
+ return "logsdb";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ assertOK(createDataStream(client(), dataStreamName()));
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ assertOK(putComponentTemplate(client(), "logs@custom", sourceModeOverride));
+ rolloverDataStream(client(), dataStreamName());
+ assertOK(removeComponentTemplate(client(), "logs@custom"));
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }, new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "tsdb-test-regular";
+ }
+
+ @Override
+ public String indexMode() {
+ return "time_series";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mode": "time_series",
+ "routing_path": ["dim"]
+ }
+ },
+ "mappings": {
+ "properties": {
+ "dim": {
+ "type": "keyword",
+ "time_series_dimension": true
+ }
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "tsdb-test-regular-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["tsdb-test-regular"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["tsdb-test-regular-component"]
+ }
+ """;
+
+ putTemplate(client(), "tsdb-test-regular-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+ }, new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "tsdb-test-synthetic";
+ }
+
+ @Override
+ public String indexMode() {
+ return "time_series";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mode": "time_series",
+ "routing_path": ["dim"],
+ "mapping.source.mode": "SYNTHETIC"
+ }
+ },
+ "mappings": {
+ "properties": {
+ "dim": {
+ "type": "keyword",
+ "time_series_dimension": true
+ }
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "tsdb-test-synthetic-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["tsdb-test-synthetic"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["tsdb-test-synthetic-component"]
+ }
+ """;
+
+ putTemplate(client(), "tsdb-test-synthetic-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+ }, new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "tsdb-test-stored";
+ }
+
+ @Override
+ public String indexMode() {
+ return "time_series";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mode": "time_series",
+ "routing_path": ["dim"],
+ "mapping.source.mode": "STORED"
+ }
+ },
+ "mappings": {
+ "properties": {
+ "dim": {
+ "type": "keyword",
+ "time_series_dimension": true
+ }
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "tsdb-test-stored-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["tsdb-test-stored"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["tsdb-test-stored-component"]
+ }
+ """;
+
+ putTemplate(client(), "tsdb-test-stored-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ },
+
+ new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "standard";
+ }
+
+ @Override
+ public String indexMode() {
+ return "standard";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var template = """
+ {
+ "index_patterns": ["standard"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": []
+ }
+ """;
+
+ putTemplate(client(), "standard-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ },
+ new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "standard-synthetic";
+ }
+
+ @Override
+ public String indexMode() {
+ return "standard";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "SYNTHETIC"
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "standard-synthetic-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["standard-synthetic"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["standard-synthetic-component"]
+ }
+ """;
+
+ putTemplate(client(), "standard-synthetic-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.SYNTHETIC;
+ }
+ },
+ new TestCase() {
+ @Override
+ public String dataStreamName() {
+ return "standard-stored";
+ }
+
+ @Override
+ public String indexMode() {
+ return "standard";
+ }
+
+ @Override
+ public void prepareDataStream() throws IOException {
+ var componentTemplate = """
+ {
+ "template": {
+ "settings": {
+ "index": {
+ "mapping.source.mode": "STORED"
+ }
+ }
+ }
+ }
+ """;
+ assertOK(putComponentTemplate(client(), "standard-stored-component", componentTemplate));
+
+ var template = """
+ {
+ "index_patterns": ["standard-stored"],
+ "priority": 100,
+ "data_stream": {},
+ "composed_of": ["standard-stored-component"]
+ }
+ """;
+
+ putTemplate(client(), "standard-stored-template", template);
+ assertOK(createDataStream(client(), dataStreamName()));
+ }
+
+ @Override
+ public void rollover() throws IOException {
+ rolloverDataStream(client(), dataStreamName());
+ }
+
+ @Override
+ public SourceFieldMapper.Mode initialMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+
+ @Override
+ public SourceFieldMapper.Mode finalMode() {
+ return SourceFieldMapper.Mode.STORED;
+ }
+ }
+ );
+ }
+}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java
index c03e8aea9c2ac..8a5bb8d12cd3d 100644
--- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/DataGenerationHelper.java
@@ -89,6 +89,7 @@ void standardMapping(XContentBuilder builder) throws IOException {
}
void logsDbSettings(Settings.Builder builder) {
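+ // Explicitly select the logsdb index mode for indices created with these settings.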
+ builder.put("index.mode", "logsdb");
if (keepArraySource) {
builder.put(Mapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING.getKey(), "arrays");
}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java
index 0329f7723a108..d9abdc2cde446 100644
--- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT.java
@@ -7,17 +7,10 @@
package org.elasticsearch.xpack.logsdb.qa;
-import org.elasticsearch.client.Request;
-import org.elasticsearch.client.Response;
-import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
-import java.util.List;
-import java.util.Locale;
-
-import static org.hamcrest.Matchers.equalTo;
/**
* This test compares behavior of a logsdb data stream and a standard index mode data stream
@@ -52,27 +45,4 @@ public void baselineMappings(XContentBuilder builder) throws IOException {
public void contenderMappings(XContentBuilder builder) throws IOException {
dataGenerationHelper.standardMapping(builder);
}
-
- @Override
- public Response indexContenderDocuments(CheckedSupplier<List<XContentBuilder>, IOException> documentsSupplier) throws IOException {
- var reindexRequest = new Request("POST", "/_reindex?refresh=true");
- reindexRequest.setJsonEntity(String.format(Locale.ROOT, """
- {
- "source": {
- "index": "%s"
- },
- "dest": {
- "index": "%s",
- "op_type": "create"
- }
- }
- """, getBaselineDataStreamName(), getContenderDataStreamName()));
- var response = client.performRequest(reindexRequest);
- assertOK(response);
-
- var body = entityAsMap(response);
- assertThat("encountered failures when performing reindex:\n " + body, body.get("failures"), equalTo(List.of()));
-
- return response;
- }
}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java
new file mode 100644
index 0000000000000..776a6faf7fa07
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedIntoStoredSourceChallengeRestIT.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb.qa;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+/**
+ * This test compares behavior of a logsdb data stream and a logsdb data stream using stored
+ * source that contains data reindexed from the initial data stream.
+ * There should be no differences between the two data streams.
+ */
+public class LogsDbVersusReindexedIntoStoredSourceChallengeRestIT extends ReindexChallengeRestIT {
+ public String getBaselineDataStreamName() {
+ return "logs-apache-baseline";
+ }
+
+ public String getContenderDataStreamName() {
+ return "logs-apache-reindexed";
+ }
+
+ @Override
+ public void baselineSettings(Settings.Builder builder) {
+ dataGenerationHelper.logsDbSettings(builder);
+ }
+
+ @Override
+ public void contenderSettings(Settings.Builder builder) {
+ dataGenerationHelper.logsDbSettings(builder);
+ builder.put("index.mapping.source.mode", "stored");
+ }
+
+ @Override
+ public void baselineMappings(XContentBuilder builder) throws IOException {
+ dataGenerationHelper.logsDbMapping(builder);
+ }
+
+ @Override
+ public void contenderMappings(XContentBuilder builder) throws IOException {
+ dataGenerationHelper.logsDbMapping(builder);
+ }
+}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java
index 1c425cf30907b..8b00c647b5dd0 100644
--- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/LogsDbVersusReindexedLogsDbChallengeRestIT.java
@@ -7,17 +7,10 @@
package org.elasticsearch.xpack.logsdb.qa;
-import org.elasticsearch.client.Request;
-import org.elasticsearch.client.Response;
-import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
-import java.util.List;
-import java.util.Locale;
-
-import static org.hamcrest.Matchers.equalTo;
/**
* This test compares behavior of a logsdb data stream and a data stream containing
@@ -52,27 +45,4 @@ public void baselineMappings(XContentBuilder builder) throws IOException {
public void contenderMappings(XContentBuilder builder) throws IOException {
dataGenerationHelper.logsDbMapping(builder);
}
-
- @Override
- public Response indexContenderDocuments(CheckedSupplier<List<XContentBuilder>, IOException> documentsSupplier) throws IOException {
- var reindexRequest = new Request("POST", "/_reindex?refresh=true");
- reindexRequest.setJsonEntity(String.format(Locale.ROOT, """
- {
- "source": {
- "index": "%s"
- },
- "dest": {
- "index": "%s",
- "op_type": "create"
- }
- }
- """, getBaselineDataStreamName(), getContenderDataStreamName()));
- var response = client.performRequest(reindexRequest);
- assertOK(response);
-
- var body = entityAsMap(response);
- assertThat("encountered failures when performing reindex:\n " + body, body.get("failures"), equalTo(List.of()));
-
- return response;
- }
}
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java
new file mode 100644
index 0000000000000..2f018b7dc0b38
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsStoredSourceChallengeRestIT.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb.qa;
+
+import org.elasticsearch.common.settings.Settings;
+
+/**
+ * This test compares behavior of a standard mode data stream and a logsdb data stream using stored source.
+ * There should be no differences between the two data streams.
+ */
+public class StandardVersusLogsStoredSourceChallengeRestIT extends StandardVersusLogsIndexModeRandomDataChallengeRestIT {
+ @Override
+ public void contenderSettings(Settings.Builder builder) {
+ super.contenderSettings(builder);
+ builder.put("index.mapping.source.mode", "stored");
+ }
+}
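
For readers unfamiliar with the setting, a minimal sketch (not part of the change) of the contender settings this override produces. The index.mode value is an assumption about what the parent StandardVersusLogsIndexModeRandomDataChallengeRestIT configures; it is not shown in this diff:

    // Effective contender settings, under the stated assumption about the parent class.
    static Settings effectiveContenderSettings() {
        return Settings.builder()
            .put("index.mode", "logsdb")                 // assumed: set by the parent's contenderSettings
            .put("index.mapping.source.mode", "stored")  // the override above: keep full stored _source
            .build();
    }
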
diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java
new file mode 100644
index 0000000000000..a0672daafb243
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb.qa;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+/**
+ * This test compares behavior of a logsdb data stream using stored source and a logsdb data stream
+ * containing data reindexed from initial data stream.
+ * There should be no differences between such two data streams.
+ */
+public class StoredSourceLogsDbVersusReindexedLogsDbChallengeRestIT extends ReindexChallengeRestIT {
+ public String getBaselineDataStreamName() {
+ return "logs-apache-baseline";
+ }
+
+ public String getContenderDataStreamName() {
+ return "logs-apache-reindexed";
+ }
+
+ @Override
+ public void baselineSettings(Settings.Builder builder) {
+ dataGenerationHelper.logsDbSettings(builder);
+ builder.put("index.mapping.source.mode", "stored");
+ }
+
+ @Override
+ public void contenderSettings(Settings.Builder builder) {
+ dataGenerationHelper.logsDbSettings(builder);
+ }
+
+ @Override
+ public void baselineMappings(XContentBuilder builder) throws IOException {
+ dataGenerationHelper.logsDbMapping(builder);
+ }
+
+ @Override
+ public void contenderMappings(XContentBuilder builder) throws IOException {
+ dataGenerationHelper.logsDbMapping(builder);
+ }
+}
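
To make the asymmetry above explicit: both sides share the logsdb settings and mappings from dataGenerationHelper, and only the baseline pins stored source, while the reindexed contender keeps whatever source mode logsdb defaults to. A hedged sketch; the index.mode value is an assumption about dataGenerationHelper.logsDbSettings, which is not shown in this diff:

    // Baseline: logsdb with explicitly stored _source (see baselineSettings above).
    static Settings baseline() {
        return Settings.builder()
            .put("index.mode", "logsdb")                 // assumed from logsDbSettings
            .put("index.mapping.source.mode", "stored")  // explicit in baselineSettings above
            .build();
    }

    // Contender: logsdb with the source mode left at its default.
    static Settings contender() {
        return Settings.builder()
            .put("index.mode", "logsdb")                 // assumed from logsDbSettings
            .build();
    }
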
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java
index 7e88cad88dcec..bb973bf4359e8 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java
@@ -26,7 +26,6 @@
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.license.MockLicenseState;
import org.elasticsearch.plugins.IngestPlugin;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
@@ -139,7 +138,6 @@ public void setUpVariables() {
Collections.singletonList(SKINNY_INGEST_PLUGIN),
client,
null,
- DocumentParsingProvider.EMPTY_INSTANCE,
FailureStoreMetrics.NOOP
);
}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
index 5710b031494bf..c2e9a92e45353 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
@@ -115,7 +115,6 @@
import org.elasticsearch.indices.TestIndexNameExpressionResolver;
import org.elasticsearch.license.MockLicenseState;
import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
@@ -1580,7 +1579,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception {
TransportShardBulkAction.performOnPrimary(
request,
indexShard,
- new UpdateHelper(mock(ScriptService.class), DocumentParsingProvider.EMPTY_INSTANCE),
+ new UpdateHelper(mock(ScriptService.class)),
System::currentTimeMillis,
mappingUpdater,
waitForMappingUpdate,